diff --git a/__tests__/cache.test.ts b/__tests__/cache.test.ts index 38995b51b..54856ee61 100644 --- a/__tests__/cache.test.ts +++ b/__tests__/cache.test.ts @@ -65,9 +65,8 @@ describe('dependency cache', () => { }); describe('for maven', () => { - it('warns if no pom.xml found', async () => { - await restore('maven'); - expect(spyWarning).toBeCalledWith( + it('throws error if no pom.xml found', async () => { + await expect(restore('maven')).rejects.toThrowError( `No file in ${projectRoot( workspace )} matched to [**/pom.xml], make sure you have checked out the target repository` @@ -83,9 +82,8 @@ describe('dependency cache', () => { }); }); describe('for gradle', () => { - it('warns if no build.gradle found', async () => { - await restore('gradle'); - expect(spyWarning).toBeCalledWith( + it('throws error if no build.gradle found', async () => { + await expect(restore('gradle')).rejects.toThrowError( `No file in ${projectRoot( workspace )} matched to [**/*.gradle*,**/gradle-wrapper.properties], make sure you have checked out the target repository` diff --git a/dist/cleanup/index.js b/dist/cleanup/index.js index 0dcb3ec65..bbc65296e 100644 --- a/dist/cleanup/index.js +++ b/dist/cleanup/index.js @@ -1249,24 +1249,16 @@ module.exports = toComparators */ Object.defineProperty(exports, "__esModule", { value: true }); exports.PropagationAPI = void 0; -var global_utils_1 = __webpack_require__(525); var NoopTextMapPropagator_1 = __webpack_require__(918); var TextMapPropagator_1 = __webpack_require__(881); -var context_helpers_1 = __webpack_require__(483); -var utils_1 = __webpack_require__(112); -var diag_1 = __webpack_require__(118); +var global_utils_1 = __webpack_require__(525); var API_NAME = 'propagation'; -var NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator_1.NoopTextMapPropagator(); /** * Singleton object which represents the entry point to the OpenTelemetry Propagation API */ var PropagationAPI = /** @class */ (function () { /** Empty private constructor prevents end users from constructing a new instance of the API */ function PropagationAPI() { - this.createBaggage = utils_1.createBaggage; - this.getBaggage = context_helpers_1.getBaggage; - this.setBaggage = context_helpers_1.setBaggage; - this.deleteBaggage = context_helpers_1.deleteBaggage; } /** Get the singleton instance of the Propagator API */ PropagationAPI.getInstance = function () { @@ -1276,12 +1268,11 @@ var PropagationAPI = /** @class */ (function () { return this._instance; }; /** - * Set the current propagator. - * - * @returns true if the propagator was successfully registered, else false + * Set the current propagator. 
Returns the initialized propagator */ PropagationAPI.prototype.setGlobalPropagator = function (propagator) { - return global_utils_1.registerGlobal(API_NAME, propagator, diag_1.DiagAPI.instance()); + global_utils_1.registerGlobal(API_NAME, propagator); + return propagator; }; /** * Inject context into a carrier to be propagated inter-process @@ -1313,10 +1304,10 @@ var PropagationAPI = /** @class */ (function () { }; /** Remove the global propagator */ PropagationAPI.prototype.disable = function () { - global_utils_1.unregisterGlobal(API_NAME, diag_1.DiagAPI.instance()); + global_utils_1.unregisterGlobal(API_NAME); }; PropagationAPI.prototype._getGlobalPropagator = function () { - return global_utils_1.getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; + return global_utils_1.getGlobal(API_NAME) || NoopTextMapPropagator_1.NOOP_TEXT_MAP_PROPAGATOR; }; return PropagationAPI; }()); @@ -1875,16 +1866,17 @@ if (typeof Symbol === undefined || !Symbol.asyncIterator) { * See the License for the specific language governing permissions and * limitations under the License. */ -var __spreadArray = (this && this.__spreadArray) || function (to, from) { - for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) - to[j] = from[i]; - return to; +var __spreadArrays = (this && this.__spreadArrays) || function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.ContextAPI = void 0; var NoopContextManager_1 = __webpack_require__(425); var global_utils_1 = __webpack_require__(525); -var diag_1 = __webpack_require__(118); var API_NAME = 'context'; var NOOP_CONTEXT_MANAGER = new NoopContextManager_1.NoopContextManager(); /** @@ -1902,12 +1894,11 @@ var ContextAPI = /** @class */ (function () { return this._instance; }; /** - * Set the current context manager. - * - * @returns true if the context manager was successfully registered, else false + * Set the current context manager. Returns the initialized context manager */ ContextAPI.prototype.setGlobalContextManager = function (contextManager) { - return global_utils_1.registerGlobal(API_NAME, contextManager, diag_1.DiagAPI.instance()); + global_utils_1.registerGlobal(API_NAME, contextManager); + return contextManager; }; /** * Get the currently active context @@ -1929,16 +1920,17 @@ var ContextAPI = /** @class */ (function () { for (var _i = 3; _i < arguments.length; _i++) { args[_i - 3] = arguments[_i]; } - return (_a = this._getContextManager()).with.apply(_a, __spreadArray([context, fn, thisArg], args)); + return (_a = this._getContextManager()).with.apply(_a, __spreadArrays([context, fn, thisArg], args)); }; /** * Bind a context to a target function or event emitter * - * @param context context to bind to the event emitter or function. Defaults to the currently active context * @param target function or event emitter to bind + * @param context context to bind to the event emitter or function. 
Defaults to the currently active context */ - ContextAPI.prototype.bind = function (context, target) { - return this._getContextManager().bind(context, target); + ContextAPI.prototype.bind = function (target, context) { + if (context === void 0) { context = this.active(); } + return this._getContextManager().bind(target, context); }; ContextAPI.prototype._getContextManager = function () { return global_utils_1.getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER; @@ -1946,7 +1938,7 @@ var ContextAPI = /** @class */ (function () { /** Disable and remove the global context manager */ ContextAPI.prototype.disable = function () { this._getContextManager().disable(); - global_utils_1.unregisterGlobal(API_NAME, diag_1.DiagAPI.instance()); + global_utils_1.unregisterGlobal(API_NAME); }; return ContextAPI; }()); @@ -3642,63 +3634,7 @@ exports.AbortSignal = AbortSignal; /* 109 */, /* 110 */, /* 111 */, -/* 112 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.baggageEntryMetadataFromString = exports.createBaggage = void 0; -var __1 = __webpack_require__(440); -var baggage_impl_1 = __webpack_require__(666); -var symbol_1 = __webpack_require__(561); -/** - * Create a new Baggage with optional entries - * - * @param entries An array of baggage entries the new baggage should contain - */ -function createBaggage(entries) { - if (entries === void 0) { entries = {}; } - return new baggage_impl_1.BaggageImpl(new Map(Object.entries(entries))); -} -exports.createBaggage = createBaggage; -/** - * Create a serializable BaggageEntryMetadata object from a string. - * - * @param str string metadata. Format is currently not defined by the spec and has no special meaning. 
- * - */ -function baggageEntryMetadataFromString(str) { - if (typeof str !== 'string') { - __1.diag.error("Cannot create baggage metadata from unknown type: " + typeof str); - str = ''; - } - return { - __TYPE__: symbol_1.baggageEntryMetadataSymbol, - toString: function () { - return str; - }, - }; -} -exports.baggageEntryMetadataFromString = baggageEntryMetadataFromString; -//# sourceMappingURL=utils.js.map - -/***/ }), +/* 112 */, /* 113 */, /* 114 */ /***/ (function(__unusedmodule, exports, __webpack_require__) { @@ -3947,7 +3883,6 @@ exports.saveCache = saveCache; */ Object.defineProperty(exports, "__esModule", { value: true }); exports.DiagAPI = void 0; -var ComponentLogger_1 = __webpack_require__(362); var logLevelLogger_1 = __webpack_require__(673); var types_1 = __webpack_require__(545); var global_utils_1 = __webpack_require__(525); @@ -3978,7 +3913,7 @@ var DiagAPI = /** @class */ (function () { var self = this; // DiagAPI specific functions self.setLogger = function (logger, logLevel) { - var _a, _b; + var _a; if (logLevel === void 0) { logLevel = types_1.DiagLogLevel.INFO; } if (logger === self) { // There isn't much we can do here. @@ -3986,23 +3921,12 @@ var DiagAPI = /** @class */ (function () { // Try to log to self. If a logger was previously registered it will receive the log. var err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); - return false; - } - var oldLogger = global_utils_1.getGlobal('diag'); - var newLogger = logLevelLogger_1.createLogLevelDiagLogger(logLevel, logger); - // There already is an logger registered. We'll let it know before overwriting it. - if (oldLogger) { - var stack = (_b = new Error().stack) !== null && _b !== void 0 ? _b : ''; - oldLogger.warn("Current logger will be overwritten from " + stack); - newLogger.warn("Current logger will overwrite one already registered from " + stack); + return; } - return global_utils_1.registerGlobal('diag', newLogger, self, true); + global_utils_1.registerGlobal('diag', logLevelLogger_1.createLogLevelDiagLogger(logLevel, logger), true); }; self.disable = function () { - global_utils_1.unregisterGlobal(API_NAME, self); - }; - self.createComponentLogger = function (options) { - return new ComponentLogger_1.DiagComponentLogger(options); + global_utils_1.unregisterGlobal(API_NAME); }; self.verbose = _logProxy('verbose'); self.debug = _logProxy('debug'); @@ -4565,7 +4489,7 @@ module.exports = require("child_process"); /* 130 */, /* 131 */, /* 132 */ -/***/ (function(__unusedmodule, exports) { +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; @@ -4585,15 +4509,109 @@ module.exports = require("child_process"); * limitations under the License. */ Object.defineProperty(exports, "__esModule", { value: true }); -exports.ROOT_CONTEXT = exports.createContextKey = void 0; +exports.ROOT_CONTEXT = exports.createContextKey = exports.setBaggage = exports.getBaggage = exports.isInstrumentationSuppressed = exports.unsuppressInstrumentation = exports.suppressInstrumentation = exports.getSpanContext = exports.setSpanContext = exports.setSpan = exports.getSpan = void 0; +var NoopSpan_1 = __webpack_require__(767); +/** + * span key + */ +var SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN'); +/** + * Shared key for indicating if instrumentation should be suppressed beyond + * this current scope. 
+ */ +var SUPPRESS_INSTRUMENTATION_KEY = createContextKey('OpenTelemetry Context Key SUPPRESS_INSTRUMENTATION'); +/** + * Baggage key + */ +var BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key'); +/** + * Return the span if one exists + * + * @param context context to get span from + */ +function getSpan(context) { + return context.getValue(SPAN_KEY) || undefined; +} +exports.getSpan = getSpan; +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +function setSpan(context, span) { + return context.setValue(SPAN_KEY, span); +} +exports.setSpan = setSpan; +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +function setSpanContext(context, spanContext) { + return setSpan(context, new NoopSpan_1.NoopSpan(spanContext)); +} +exports.setSpanContext = setSpanContext; +/** + * Get the span context of the span if it exists. + * + * @param context context to get values from + */ +function getSpanContext(context) { + var _a; + return (_a = getSpan(context)) === null || _a === void 0 ? void 0 : _a.context(); +} +exports.getSpanContext = getSpanContext; +/** + * Sets value on context to indicate that instrumentation should + * be suppressed beyond this current scope. + * + * @param context context to set the suppress instrumentation value on. + */ +function suppressInstrumentation(context) { + return context.setValue(SUPPRESS_INSTRUMENTATION_KEY, true); +} +exports.suppressInstrumentation = suppressInstrumentation; +/** + * Sets value on context to indicate that instrumentation should + * no-longer be suppressed beyond this current scope. + * + * @param context context to set the suppress instrumentation value on. + */ +function unsuppressInstrumentation(context) { + return context.setValue(SUPPRESS_INSTRUMENTATION_KEY, false); +} +exports.unsuppressInstrumentation = unsuppressInstrumentation; +/** + * Return current suppress instrumentation value for the given context, + * if it exists. + * + * @param context context check for the suppress instrumentation value. + */ +function isInstrumentationSuppressed(context) { + return Boolean(context.getValue(SUPPRESS_INSTRUMENTATION_KEY)); +} +exports.isInstrumentationSuppressed = isInstrumentationSuppressed; +/** + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +function getBaggage(context) { + return context.getValue(BAGGAGE_KEY) || undefined; +} +exports.getBaggage = getBaggage; +/** + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +function setBaggage(context, baggage) { + return context.setValue(BAGGAGE_KEY, baggage); +} +exports.setBaggage = setBaggage; /** Get a key to uniquely identify a context value */ function createContextKey(description) { - // The specification states that for the same input, multiple calls should - // return different keys. Due to the nature of the JS dependency management - // system, this creates problems where multiple versions of some package - // could hold different keys for the same property. - // - // Therefore, we use Symbol.for which returns the same key for the same input. 
return Symbol.for(description); } exports.createContextKey = createContextKey; @@ -4649,7 +4667,7 @@ exports.ROOT_CONTEXT = new BaseContext(); Object.defineProperty(exports, "__esModule", { value: true }); exports.VERSION = void 0; // this is autogenerated file, see scripts/version-update.js -exports.VERSION = '1.0.2'; +exports.VERSION = '1.0.0-rc.0'; //# sourceMappingURL=version.js.map /***/ }), @@ -4962,7 +4980,318 @@ module.exports = parseOptions /***/ }), -/* 144 */, +/* 144 */ +/***/ (function(module) { + +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || from); + }; + + __await = function (v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); +}); + + +/***/ }), /* 145 */ /***/ (function(__unusedmodule, exports) { @@ -5048,10 +5377,9 @@ exports.fromPromise = function (fn) { * limitations under the License. */ Object.defineProperty(exports, "__esModule", { value: true }); -exports.NoopTracer = void 0; -var __1 = __webpack_require__(440); -var context_utils_1 = __webpack_require__(720); -var NonRecordingSpan_1 = __webpack_require__(437); +exports.NOOP_TRACER = exports.NoopTracer = void 0; +var context_1 = __webpack_require__(132); +var NoopSpan_1 = __webpack_require__(767); var spancontext_utils_1 = __webpack_require__(629); /** * No-op implementations of {@link Tracer}. @@ -5063,40 +5391,16 @@ var NoopTracer = /** @class */ (function () { NoopTracer.prototype.startSpan = function (name, options, context) { var root = Boolean(options === null || options === void 0 ? void 0 : options.root); if (root) { - return new NonRecordingSpan_1.NonRecordingSpan(); + return new NoopSpan_1.NoopSpan(); } - var parentFromContext = context && context_utils_1.getSpanContext(context); + var parentFromContext = context && context_1.getSpanContext(context); if (isSpanContext(parentFromContext) && spancontext_utils_1.isSpanContextValid(parentFromContext)) { - return new NonRecordingSpan_1.NonRecordingSpan(parentFromContext); - } - else { - return new NonRecordingSpan_1.NonRecordingSpan(); - } - }; - NoopTracer.prototype.startActiveSpan = function (name, arg2, arg3, arg4) { - var opts; - var ctx; - var fn; - if (arguments.length < 2) { - return; - } - else if (arguments.length === 2) { - fn = arg2; - } - else if (arguments.length === 3) { - opts = arg2; - fn = arg3; + return new NoopSpan_1.NoopSpan(parentFromContext); } else { - opts = arg2; - ctx = arg3; - fn = arg4; + return new NoopSpan_1.NoopSpan(); } - var parentContext = ctx !== null && ctx !== void 0 ? 
ctx : __1.context.active(); - var span = this.startSpan(name, opts, parentContext); - var contextWithSpanSet = context_utils_1.setSpan(parentContext, span); - return __1.context.with(contextWithSpanSet, fn, undefined, span); }; return NoopTracer; }()); @@ -5107,6 +5411,7 @@ function isSpanContext(spanContext) { typeof spanContext['traceId'] === 'string' && typeof spanContext['traceFlags'] === 'number'); } +exports.NOOP_TRACER = new NoopTracer(); //# sourceMappingURL=NoopTracer.js.map /***/ }), @@ -5341,7 +5646,7 @@ Object.defineProperty(exports, "__esModule", { value: true }); * limitations under the License. */ Object.defineProperty(exports, "__esModule", { value: true }); -exports.NoopTracerProvider = void 0; +exports.NOOP_TRACER_PROVIDER = exports.NoopTracerProvider = void 0; var NoopTracer_1 = __webpack_require__(151); /** * An implementation of the {@link TracerProvider} which returns an impotent @@ -5353,11 +5658,12 @@ var NoopTracerProvider = /** @class */ (function () { function NoopTracerProvider() { } NoopTracerProvider.prototype.getTracer = function (_name, _version) { - return new NoopTracer_1.NoopTracer(); + return NoopTracer_1.NOOP_TRACER; }; return NoopTracerProvider; }()); exports.NoopTracerProvider = NoopTracerProvider; +exports.NOOP_TRACER_PROVIDER = new NoopTracerProvider(); //# sourceMappingURL=NoopTracerProvider.js.map /***/ }), @@ -7729,7 +8035,7 @@ var __createBinding; ar[i] = from[i]; } } - return to.concat(ar || Array.prototype.slice.call(from)); + return to.concat(ar || from); }; __await = function (v) { @@ -7835,6 +8141,205 @@ var __createBinding; Object.defineProperty(exports, '__esModule', { value: true }); var api = __webpack_require__(440); +var tslib = __webpack_require__(144); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A no-op implementation of Span that can safely be used without side-effects. + */ +var NoOpSpan = /** @class */ (function () { + function NoOpSpan() { + } + /** + * Returns the SpanContext associated with this Span. + */ + NoOpSpan.prototype.context = function () { + return { + spanId: "", + traceId: "", + traceFlags: 0 /* NONE */ + }; + }; + /** + * Marks the end of Span execution. + * @param _endTime - The time to use as the Span's end time. Defaults to + * the current time. + */ + NoOpSpan.prototype.end = function (_endTime) { + /* Noop */ + }; + /** + * Sets an attribute on the Span + * @param _key - The attribute key + * @param _value - The attribute value + */ + NoOpSpan.prototype.setAttribute = function (_key, _value) { + return this; + }; + /** + * Sets attributes on the Span + * @param _attributes - The attributes to add + */ + NoOpSpan.prototype.setAttributes = function (_attributes) { + return this; + }; + /** + * Adds an event to the Span + * @param _name - The name of the event + * @param _attributes - The associated attributes to add for this event + */ + NoOpSpan.prototype.addEvent = function (_name, _attributes) { + return this; + }; + /** + * Sets a status on the span. Overrides the default of SpanStatusCode.OK. + * @param _status - The status to set. 
+ */ + NoOpSpan.prototype.setStatus = function (_status) { + return this; + }; + /** + * Updates the name of the Span + * @param _name - the new Span name + */ + NoOpSpan.prototype.updateName = function (_name) { + return this; + }; + /** + * Returns whether this span will be recorded + */ + NoOpSpan.prototype.isRecording = function () { + return false; + }; + /** + * Sets exception as a span event + * @param exception - the exception the only accepted values are string or Error + * @param time - the time to set as Span's event time. If not provided, + * use the current time. + */ + NoOpSpan.prototype.recordException = function (_exception, _time) { + /* do nothing */ + }; + return NoOpSpan; +}()); + +// Copyright (c) Microsoft Corporation. +/** + * A no-op implementation of Tracer that can be used when tracing + * is disabled. + */ +var NoOpTracer = /** @class */ (function () { + function NoOpTracer() { + } + /** + * Starts a new Span. + * @param _name - The name of the span. + * @param _options - The SpanOptions used during Span creation. + */ + NoOpTracer.prototype.startSpan = function (_name, _options) { + return new NoOpSpan(); + }; + /** + * Returns the current Span from the current context, if available. + */ + NoOpTracer.prototype.getCurrentSpan = function () { + return new NoOpSpan(); + }; + /** + * Executes the given function within the context provided by a Span. + * @param _span - The span that provides the context. + * @param fn - The function to be executed. + */ + NoOpTracer.prototype.withSpan = function (_span, fn) { + return fn(); + }; + /** + * Bind a Span as the target's scope + * @param target - An object to bind the scope. + * @param _span - A specific Span to use. Otherwise, use the current one. + */ + NoOpTracer.prototype.bind = function (target, _span) { + return target; + }; + return NoOpTracer; +}()); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function getGlobalObject() { + return global; +} + +// Copyright (c) Microsoft Corporation. +// V1 = OpenTelemetry 0.1 +// V2 = OpenTelemetry 0.2 +// V3 = OpenTelemetry 0.6.1 +// V4 = OpenTelemetry 1.0.0-rc.0 +var GLOBAL_TRACER_VERSION = 4; +// preview5 shipped with @azure/core-tracing.tracerCache +// and didn't have smart detection for collisions +var GLOBAL_TRACER_SYMBOL = Symbol.for("@azure/core-tracing.tracerCache3"); +var cache; +function loadTracerCache() { + var globalObj = getGlobalObject(); + var existingCache = globalObj[GLOBAL_TRACER_SYMBOL]; + var setGlobalCache = true; + if (existingCache) { + if (existingCache.version === GLOBAL_TRACER_VERSION) { + cache = existingCache; + } + else { + setGlobalCache = false; + if (existingCache.tracer) { + throw new Error("Two incompatible versions of @azure/core-tracing have been loaded.\n This library is " + GLOBAL_TRACER_VERSION + ", existing is " + existingCache.version + "."); + } + } + } + if (!cache) { + cache = { + tracer: undefined, + version: GLOBAL_TRACER_VERSION + }; + } + if (setGlobalCache) { + globalObj[GLOBAL_TRACER_SYMBOL] = cache; + } +} +function getCache() { + if (!cache) { + loadTracerCache(); + } + return cache; +} + +// Copyright (c) Microsoft Corporation. +var defaultTracer; +function getDefaultTracer() { + if (!defaultTracer) { + defaultTracer = new NoOpTracer(); + } + return defaultTracer; +} +/** + * Sets the global tracer, enabling tracing for the Azure SDK. + * @param tracer - An OpenTelemetry Tracer instance. 
+ */ +function setTracer(tracer) { + var cache = getCache(); + cache.tracer = tracer; +} +/** + * Retrieves the active tracer, or returns a + * no-op implementation if one is not set. + */ +function getTracer() { + var cache = getCache(); + if (!cache.tracer) { + return getDefaultTracer(); + } + return cache.tracer; +} // Copyright (c) Microsoft Corporation. (function (SpanKind) { @@ -7869,7 +8374,7 @@ var api = __webpack_require__(440); * @param context - context to get span from */ function getSpan(context) { - return api.trace.getSpan(context); + return api.getSpan(context); } /** * Set the span on a context @@ -7878,7 +8383,7 @@ function getSpan(context) { * @param span - span to set active */ function setSpan(context, span) { - return api.trace.setSpan(context, span); + return api.setSpan(context, span); } /** * Wrap span context in a NoopSpan and set as span in a new @@ -7888,7 +8393,7 @@ function setSpan(context, span) { * @param spanContext - span context to be wrapped */ function setSpanContext(context, spanContext) { - return api.trace.setSpanContext(context, spanContext); + return api.setSpanContext(context, spanContext); } /** * Get the span context of the span if it exists. @@ -7896,24 +8401,10 @@ function setSpanContext(context, spanContext) { * @param context - context to get values from */ function getSpanContext(context) { - return api.trace.getSpanContext(context); -} -/** - * Returns true of the given {@link SpanContext} is valid. - * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. - * - * @param context - the {@link SpanContext} to validate. - * - * @returns true if the {@link SpanContext} is valid, false otherwise. - */ -function isSpanContextValid(context) { - return api.trace.isSpanContextValid(context); -} -function getTracer(name, version) { - return api.trace.getTracer(name || "azure/core-tracing", version); + return api.getSpanContext(context); } /** Entrypoint for context API */ -const context = api.context; +var context = api.context; (function (SpanStatusCode) { /** * The default status. @@ -7931,18 +8422,422 @@ const context = api.context; })(exports.SpanStatusCode || (exports.SpanStatusCode = {})); // Copyright (c) Microsoft Corporation. -function isTracingDisabled() { - var _a; - if (typeof process === "undefined") { - // not supported in browser for now without polyfills - return false; - } - const azureTracingDisabledValue = (_a = process.env.AZURE_TRACING_DISABLED) === null || _a === void 0 ? void 0 : _a.toLowerCase(); - if (azureTracingDisabledValue === "false" || azureTracingDisabledValue === "0") { - return false; +// Licensed under the MIT license. +/** + * @internal + */ +var OpenCensusTraceStateWrapper = /** @class */ (function () { + function OpenCensusTraceStateWrapper(state) { + this._state = state; } - return Boolean(azureTracingDisabledValue); + OpenCensusTraceStateWrapper.prototype.get = function (_key) { + throw new Error("Method not implemented."); + }; + OpenCensusTraceStateWrapper.prototype.set = function (_key, _value) { + throw new Error("Method not implemented."); + }; + OpenCensusTraceStateWrapper.prototype.unset = function (_key) { + throw new Error("Method not implemented"); + }; + OpenCensusTraceStateWrapper.prototype.serialize = function () { + return this._state || ""; + }; + return OpenCensusTraceStateWrapper; +}()); + +// Copyright (c) Microsoft Corporation. +/** An enumeration of canonical status codes. 
*/ +var CanonicalCode; +(function (CanonicalCode) { + /** + * Not an error; returned on success + */ + CanonicalCode[CanonicalCode["OK"] = 0] = "OK"; + /** + * Internal errors. Means some invariants expected by underlying + * system has been broken. If you see one of these errors, + * something is very broken. + */ + CanonicalCode[CanonicalCode["INTERNAL"] = 13] = "INTERNAL"; +})(CanonicalCode || (CanonicalCode = {})); +function isWrappedSpan(span) { + return !!span && span.getWrappedSpan !== undefined; +} +function isTracer(tracerOrSpan) { + return tracerOrSpan.getWrappedTracer !== undefined; } +/** + * An implementation of OpenTelemetry Span that wraps an OpenCensus Span. + */ +var OpenCensusSpanWrapper = /** @class */ (function () { + function OpenCensusSpanWrapper(tracerOrSpan, name, options, context$1) { + if (name === void 0) { name = ""; } + if (options === void 0) { options = {}; } + if (isTracer(tracerOrSpan)) { + var span = getSpan(context$1 !== null && context$1 !== void 0 ? context$1 : context.active()); + var parent = isWrappedSpan(span) ? span.getWrappedSpan() : undefined; + this._span = tracerOrSpan.getWrappedTracer().startChildSpan({ + name: name, + childOf: parent + }); + this._span.start(); + if (options.links) { + for (var _i = 0, _a = options.links; _i < _a.length; _i++) { + var link = _a[_i]; + // Since there is no way to set the link relationship, leave it as Unspecified. + this._span.addLink(link.context.traceId, link.context.spanId, 0 /* LinkType.UNSPECIFIED */, link.attributes); + } + } + } + else { + this._span = tracerOrSpan; + } + } + /** + * The underlying OpenCensus Span + */ + OpenCensusSpanWrapper.prototype.getWrappedSpan = function () { + return this._span; + }; + /** + * Marks the end of Span execution. + * @param endTime - The time to use as the Span's end time. Defaults to + * the current time. + */ + OpenCensusSpanWrapper.prototype.end = function (_endTime) { + this._span.end(); + }; + /** + * Returns the SpanContext associated with this Span. + */ + OpenCensusSpanWrapper.prototype.context = function () { + var openCensusSpanContext = this._span.spanContext; + return { + spanId: openCensusSpanContext.spanId, + traceId: openCensusSpanContext.traceId, + traceFlags: openCensusSpanContext.options, + traceState: new OpenCensusTraceStateWrapper(openCensusSpanContext.traceState) + }; + }; + /** + * Sets an attribute on the Span + * @param key - The attribute key + * @param value - The attribute value + */ + OpenCensusSpanWrapper.prototype.setAttribute = function (key, value) { + this._span.addAttribute(key, value); + return this; + }; + /** + * Sets attributes on the Span + * @param attributes - The attributes to add + */ + OpenCensusSpanWrapper.prototype.setAttributes = function (attributes) { + this._span.attributes = attributes; + return this; + }; + /** + * Adds an event to the Span + * @param name - The name of the event + * @param attributes - The associated attributes to add for this event + */ + OpenCensusSpanWrapper.prototype.addEvent = function (_name, _attributes) { + throw new Error("Method not implemented."); + }; + /** + * Sets a status on the span. Overrides the default of SpanStatusCode.OK. + * @param status - The status to set. 
+ */ + OpenCensusSpanWrapper.prototype.setStatus = function (status) { + switch (status.code) { + case exports.SpanStatusCode.ERROR: { + this._span.setStatus(CanonicalCode.INTERNAL, status.message); + break; + } + case exports.SpanStatusCode.OK: { + this._span.setStatus(CanonicalCode.OK, status.message); + break; + } + case exports.SpanStatusCode.UNSET: { + break; + } + } + return this; + }; + /** + * Updates the name of the Span + * @param name - The new Span name + */ + OpenCensusSpanWrapper.prototype.updateName = function (name) { + this._span.name = name; + return this; + }; + /** + * Returns whether this span will be recorded + */ + OpenCensusSpanWrapper.prototype.isRecording = function () { + // NoRecordSpans have an empty traceId + return !!this._span.traceId; + }; + /** + * Sets exception as a span event + * @param exception - the exception the only accepted values are string or Error + * @param time - the time to set as Span's event time. If not provided, + * use the current time. + */ + OpenCensusSpanWrapper.prototype.recordException = function (_exception, _time) { + throw new Error("Method not implemented"); + }; + return OpenCensusSpanWrapper; +}()); + +// Copyright (c) Microsoft Corporation. +/** + * An implementation of OpenTelemetry Tracer that wraps an OpenCensus Tracer. + */ +var OpenCensusTracerWrapper = /** @class */ (function () { + /** + * Create a new wrapper around a given OpenCensus Tracer. + * @param tracer - The OpenCensus Tracer to wrap. + */ + function OpenCensusTracerWrapper(tracer) { + this._tracer = tracer; + } + /** + * The wrapped OpenCensus Tracer + */ + OpenCensusTracerWrapper.prototype.getWrappedTracer = function () { + return this._tracer; + }; + /** + * Starts a new Span. + * @param name - The name of the span. + * @param options - The SpanOptions used during Span creation. + */ + OpenCensusTracerWrapper.prototype.startSpan = function (name, options) { + return new OpenCensusSpanWrapper(this, name, options); + }; + /** + * Returns the current Span from the current context, if available. + */ + OpenCensusTracerWrapper.prototype.getCurrentSpan = function () { + return undefined; + }; + /** + * Executes the given function within the context provided by a Span. + * @param _span - The span that provides the context. + * @param _fn - The function to be executed. + */ + OpenCensusTracerWrapper.prototype.withSpan = function (_span, _fn) { + throw new Error("Method not implemented."); + }; + /** + * Bind a Span as the target's scope + * @param target - An object to bind the scope. + * @param _span - A specific Span to use. Otherwise, use the current one. + */ + OpenCensusTracerWrapper.prototype.bind = function (_target, _span) { + throw new Error("Method not implemented."); + }; + return OpenCensusTracerWrapper; +}()); + +// Copyright (c) Microsoft Corporation. +/** + * A mock span useful for testing. + */ +var TestSpan = /** @class */ (function (_super) { + tslib.__extends(TestSpan, _super); + /** + * Starts a new Span. + * @param parentTracer- The tracer that created this Span + * @param name - The name of the span. 
+ * @param context - The SpanContext this span belongs to + * @param kind - The SpanKind of this Span + * @param parentSpanId - The identifier of the parent Span + * @param startTime - The startTime of the event (defaults to now) + */ + function TestSpan(parentTracer, name, context, kind, parentSpanId, startTime) { + if (startTime === void 0) { startTime = Date.now(); } + var _this = _super.call(this) || this; + _this._tracer = parentTracer; + _this.name = name; + _this.kind = kind; + _this.startTime = startTime; + _this.parentSpanId = parentSpanId; + _this.status = { + code: exports.SpanStatusCode.OK + }; + _this.endCalled = false; + _this._context = context; + _this.attributes = {}; + return _this; + } + /** + * Returns the Tracer that created this Span + */ + TestSpan.prototype.tracer = function () { + return this._tracer; + }; + /** + * Returns the SpanContext associated with this Span. + */ + TestSpan.prototype.context = function () { + return this._context; + }; + /** + * Marks the end of Span execution. + * @param _endTime - The time to use as the Span's end time. Defaults to + * the current time. + */ + TestSpan.prototype.end = function (_endTime) { + this.endCalled = true; + }; + /** + * Sets a status on the span. Overrides the default of SpanStatusCode.OK. + * @param status - The status to set. + */ + TestSpan.prototype.setStatus = function (status) { + this.status = status; + return this; + }; + /** + * Returns whether this span will be recorded + */ + TestSpan.prototype.isRecording = function () { + return true; + }; + /** + * Sets an attribute on the Span + * @param key - The attribute key + * @param value - The attribute value + */ + TestSpan.prototype.setAttribute = function (key, value) { + this.attributes[key] = value; + return this; + }; + /** + * Sets attributes on the Span + * @param attributes - The attributes to add + */ + TestSpan.prototype.setAttributes = function (attributes) { + for (var _i = 0, _a = Object.keys(attributes); _i < _a.length; _i++) { + var key = _a[_i]; + this.attributes[key] = attributes[key]; + } + return this; + }; + return TestSpan; +}(NoOpSpan)); + +// Copyright (c) Microsoft Corporation. 
+/** + * A mock tracer useful for testing + */ +var TestTracer = /** @class */ (function (_super) { + tslib.__extends(TestTracer, _super); + function TestTracer() { + var _this = _super !== null && _super.apply(this, arguments) || this; + _this.traceIdCounter = 0; + _this.spanIdCounter = 0; + _this.rootSpans = []; + _this.knownSpans = []; + return _this; + } + TestTracer.prototype.getNextTraceId = function () { + this.traceIdCounter++; + return String(this.traceIdCounter); + }; + TestTracer.prototype.getNextSpanId = function () { + this.spanIdCounter++; + return String(this.spanIdCounter); + }; + /** + * Returns all Spans that were created without a parent + */ + TestTracer.prototype.getRootSpans = function () { + return this.rootSpans; + }; + /** + * Returns all Spans this Tracer knows about + */ + TestTracer.prototype.getKnownSpans = function () { + return this.knownSpans; + }; + /** + * Returns all Spans where end() has not been called + */ + TestTracer.prototype.getActiveSpans = function () { + return this.knownSpans.filter(function (span) { + return !span.endCalled; + }); + }; + /** + * Return all Spans for a particular trace, grouped by their + * parent Span in a tree-like structure + * @param traceId - The traceId to return the graph for + */ + TestTracer.prototype.getSpanGraph = function (traceId) { + var traceSpans = this.knownSpans.filter(function (span) { + return span.context().traceId === traceId; + }); + var roots = []; + var nodeMap = new Map(); + for (var _i = 0, traceSpans_1 = traceSpans; _i < traceSpans_1.length; _i++) { + var span = traceSpans_1[_i]; + var spanId = span.context().spanId; + var node = { + name: span.name, + children: [] + }; + nodeMap.set(spanId, node); + if (span.parentSpanId) { + var parent = nodeMap.get(span.parentSpanId); + if (!parent) { + throw new Error("Span with name " + node.name + " has an unknown parentSpan with id " + span.parentSpanId); + } + parent.children.push(node); + } + else { + roots.push(node); + } + } + return { + roots: roots + }; + }; + /** + * Starts a new Span. + * @param name - The name of the span. + * @param options - The SpanOptions used during Span creation. + */ + TestTracer.prototype.startSpan = function (name, options, context$1) { + var parentContext = getSpanContext(context$1 || context.active()); + var traceId; + var isRootSpan = false; + if (parentContext && parentContext.traceId) { + traceId = parentContext.traceId; + } + else { + traceId = this.getNextTraceId(); + isRootSpan = true; + } + var spanContext = { + traceId: traceId, + spanId: this.getNextSpanId(), + traceFlags: 0 /* NONE */ + }; + var span = new TestSpan(this, name, spanContext, (options === null || options === void 0 ? void 0 : options.kind) || exports.SpanKind.INTERNAL, parentContext ? parentContext.spanId : undefined, options === null || options === void 0 ? void 0 : options.startTime); + this.knownSpans.push(span); + if (isRootSpan) { + this.rootSpans.push(span); + } + return span; + }; + return TestTracer; +}(NoOpTracer)); + +// Copyright (c) Microsoft Corporation. /** * Creates a function that can be used to create spans using the global tracer. * @@ -7963,28 +8858,22 @@ function isTracingDisabled() { */ function createSpanFunction(args) { return function (operationName, operationOptions) { - const tracer = getTracer(); - const tracingOptions = (operationOptions === null || operationOptions === void 0 ? 
void 0 : operationOptions.tracingOptions) || {}; - const spanOptions = Object.assign({ kind: exports.SpanKind.INTERNAL }, tracingOptions.spanOptions); - const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName; - let span; - if (isTracingDisabled()) { - span = api.trace.wrapSpanContext(api.INVALID_SPAN_CONTEXT); - } - else { - span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); - } + var tracer = getTracer(); + var tracingOptions = (operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) || {}; + var spanOptions = tslib.__assign({ kind: exports.SpanKind.INTERNAL }, tracingOptions.spanOptions); + var spanName = args.packagePrefix ? args.packagePrefix + "." + operationName : operationName; + var span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); if (args.namespace) { span.setAttribute("az.namespace", args.namespace); } - let newSpanOptions = tracingOptions.spanOptions || {}; + var newSpanOptions = tracingOptions.spanOptions || {}; if (span.isRecording() && args.namespace) { - newSpanOptions = Object.assign(Object.assign({}, tracingOptions.spanOptions), { attributes: Object.assign(Object.assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); + newSpanOptions = tslib.__assign(tslib.__assign({}, tracingOptions.spanOptions), { attributes: tslib.__assign(tslib.__assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); } - const newTracingOptions = Object.assign(Object.assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || context.active(), span) }); - const newOperationOptions = Object.assign(Object.assign({}, operationOptions), { tracingOptions: newTracingOptions }); + var newTracingOptions = tslib.__assign(tslib.__assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || context.active(), span) }); + var newOperationOptions = tslib.__assign(tslib.__assign({}, operationOptions), { tracingOptions: newTracingOptions }); return { - span, + span: span, updatedOptions: newOperationOptions }; }; @@ -7992,26 +8881,26 @@ function createSpanFunction(args) { // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -const VERSION = "00"; +var VERSION = "00"; /** * Generates a `SpanContext` given a `traceparent` header value. * @param traceParent - Serialized span context data as a `traceparent` header value. * @returns The `SpanContext` generated from the `traceparent` value. */ function extractSpanContextFromTraceParentHeader(traceParentHeader) { - const parts = traceParentHeader.split("-"); + var parts = traceParentHeader.split("-"); if (parts.length !== 4) { return; } - const [version, traceId, spanId, traceOptions] = parts; + var version = parts[0], traceId = parts[1], spanId = parts[2], traceOptions = parts[3]; if (version !== VERSION) { return; } - const traceFlags = parseInt(traceOptions, 16); - const spanContext = { - spanId, - traceId, - traceFlags + var traceFlags = parseInt(traceOptions, 16); + var spanContext = { + spanId: spanId, + traceId: traceId, + traceFlags: traceFlags }; return spanContext; } @@ -8021,7 +8910,7 @@ function extractSpanContextFromTraceParentHeader(traceParentHeader) { * @returns The `spanContext` represented as a `traceparent` value. 
*/ function getTraceParentHeader(spanContext) { - const missingFields = []; + var missingFields = []; if (!spanContext.traceId) { missingFields.push("traceId"); } @@ -8031,13 +8920,19 @@ function getTraceParentHeader(spanContext) { if (missingFields.length) { return; } - const flags = spanContext.traceFlags || 0 /* NONE */; - const hexFlags = flags.toString(16); - const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags; + var flags = spanContext.traceFlags || 0 /* NONE */; + var hexFlags = flags.toString(16); + var traceFlags = hexFlags.length === 1 ? "0" + hexFlags : hexFlags; // https://www.w3.org/TR/trace-context/#traceparent-header-field-values - return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`; + return VERSION + "-" + spanContext.traceId + "-" + spanContext.spanId + "-" + traceFlags; } +exports.NoOpSpan = NoOpSpan; +exports.NoOpTracer = NoOpTracer; +exports.OpenCensusSpanWrapper = OpenCensusSpanWrapper; +exports.OpenCensusTracerWrapper = OpenCensusTracerWrapper; +exports.TestSpan = TestSpan; +exports.TestTracer = TestTracer; exports.context = context; exports.createSpanFunction = createSpanFunction; exports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader; @@ -8045,9 +8940,9 @@ exports.getSpan = getSpan; exports.getSpanContext = getSpanContext; exports.getTraceParentHeader = getTraceParentHeader; exports.getTracer = getTracer; -exports.isSpanContextValid = isSpanContextValid; exports.setSpan = setSpan; exports.setSpanContext = setSpanContext; +exports.setTracer = setTracer; //# sourceMappingURL=index.js.map @@ -9130,6 +10025,7 @@ Object.defineProperty(exports, '__esModule', { value: true }); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } +var tslib = __webpack_require__(865); var uuid = __webpack_require__(585); var tough = __webpack_require__(393); var http = __webpack_require__(605); @@ -9142,7 +10038,6 @@ var url = __webpack_require__(835); var stream = __webpack_require__(794); var logger$1 = __webpack_require__(492); var tunnel = __webpack_require__(413); -var tslib = __webpack_require__(865); var coreAuth = __webpack_require__(229); var xml2js = __webpack_require__(992); var os = __webpack_require__(87); @@ -9159,7 +10054,7 @@ function getHeaderKey(headerName) { } function isHttpHeadersLike(object) { if (object && typeof object === "object") { - const castObject = object; + var castObject = object; if (typeof castObject.rawHeaders === "function" && typeof castObject.clone === "function" && typeof castObject.get === "function" && @@ -9178,11 +10073,11 @@ function isHttpHeadersLike(object) { /** * A collection of HTTP header key/value pairs. */ -class HttpHeaders { - constructor(rawHeaders) { +var HttpHeaders = /** @class */ (function () { + function HttpHeaders(rawHeaders) { this._headersMap = {}; if (rawHeaders) { - for (const headerName in rawHeaders) { + for (var headerName in rawHeaders) { this.set(headerName, rawHeaders[headerName]); } } @@ -9193,99 +10088,100 @@ class HttpHeaders { * @param headerName - The name of the header to set. This value is case-insensitive. * @param headerValue - The value of the header to set. 
*/ - set(headerName, headerValue) { + HttpHeaders.prototype.set = function (headerName, headerValue) { this._headersMap[getHeaderKey(headerName)] = { name: headerName, value: headerValue.toString() }; - } + }; /** * Get the header value for the provided header name, or undefined if no header exists in this * collection with the provided name. * @param headerName - The name of the header. */ - get(headerName) { - const header = this._headersMap[getHeaderKey(headerName)]; + HttpHeaders.prototype.get = function (headerName) { + var header = this._headersMap[getHeaderKey(headerName)]; return !header ? undefined : header.value; - } + }; /** * Get whether or not this header collection contains a header entry for the provided header name. */ - contains(headerName) { + HttpHeaders.prototype.contains = function (headerName) { return !!this._headersMap[getHeaderKey(headerName)]; - } + }; /** * Remove the header with the provided headerName. Return whether or not the header existed and * was removed. * @param headerName - The name of the header to remove. */ - remove(headerName) { - const result = this.contains(headerName); + HttpHeaders.prototype.remove = function (headerName) { + var result = this.contains(headerName); delete this._headersMap[getHeaderKey(headerName)]; return result; - } + }; /** * Get the headers that are contained this collection as an object. */ - rawHeaders() { - const result = {}; - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; + HttpHeaders.prototype.rawHeaders = function () { + var result = {}; + for (var headerKey in this._headersMap) { + var header = this._headersMap[headerKey]; result[header.name.toLowerCase()] = header.value; } return result; - } + }; /** * Get the headers that are contained in this collection as an array. */ - headersArray() { - const headers = []; - for (const headerKey in this._headersMap) { + HttpHeaders.prototype.headersArray = function () { + var headers = []; + for (var headerKey in this._headersMap) { headers.push(this._headersMap[headerKey]); } return headers; - } + }; /** * Get the header names that are contained in this collection. */ - headerNames() { - const headerNames = []; - const headers = this.headersArray(); - for (let i = 0; i < headers.length; ++i) { + HttpHeaders.prototype.headerNames = function () { + var headerNames = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { headerNames.push(headers[i].name); } return headerNames; - } + }; /** * Get the header values that are contained in this collection. */ - headerValues() { - const headerValues = []; - const headers = this.headersArray(); - for (let i = 0; i < headers.length; ++i) { + HttpHeaders.prototype.headerValues = function () { + var headerValues = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { headerValues.push(headers[i].value); } return headerValues; - } + }; /** * Get the JSON object representation of this HTTP header collection. */ - toJson() { + HttpHeaders.prototype.toJson = function () { return this.rawHeaders(); - } + }; /** * Get the string representation of this HTTP header collection. */ - toString() { + HttpHeaders.prototype.toString = function () { return JSON.stringify(this.toJson()); - } + }; /** * Create a deep clone/copy of this HttpHeaders collection. */ - clone() { + HttpHeaders.prototype.clone = function () { return new HttpHeaders(this.rawHeaders()); - } -} + }; + return HttpHeaders; +}()); // Copyright (c) Microsoft Corporation. 
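A minimal, illustrative sketch of the down-levelled HttpHeaders collection above (hypothetical values; behaviour is unchanged by the ES5 conversion). Header names are normalised through getHeaderKey, so lookups are case-insensitive, while rawHeaders() reports lower-cased names:

var headers = new HttpHeaders({ "Content-Type": "application/json" });
headers.set("X-Custom", "abc");
headers.get("content-type");      // "application/json" -- lookup ignores case
headers.contains("CONTENT-TYPE"); // true
headers.rawHeaders();             // { "content-type": "application/json", "x-custom": "abc" }
var copy = headers.clone();       // rebuilt from rawHeaders(), so edits to `copy` do not touch `headers`
copy.remove("x-custom");          // true -- the header existed and was removed
headers.contains("X-Custom");     // still true on the original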
// Licensed under the MIT license. @@ -9303,7 +10199,7 @@ function encodeString(value) { function encodeByteArray(value) { // Buffer.from accepts | -- the TypeScript definition is off here // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length - const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + var bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); return bufferValue.toString("base64"); } /** @@ -9316,11 +10212,11 @@ function decodeString(value) { // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -const Constants = { +var Constants = { /** * The core-http version */ - coreHttpVersion: "2.1.0", + coreHttpVersion: "1.2.6", /** * Specifies HTTP. */ @@ -9359,8 +10255,7 @@ const Constants = { PATCH: "PATCH" }, StatusCodes: { - TooManyRequests: 429, - ServiceUnavailable: 503 + TooManyRequests: 429 } }, /** @@ -9390,18 +10285,18 @@ const Constants = { /** * Default key used to access the XML attributes. */ -const XML_ATTRKEY = "$"; +var XML_ATTRKEY = "$"; /** * Default key used to access the XML value content. */ -const XML_CHARKEY = "_"; +var XML_CHARKEY = "_"; // Copyright (c) Microsoft Corporation. -const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +var validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; /** * A constant that indicates whether the environment is node.js or browser based. */ -const isNode = typeof process !== "undefined" && +var isNode = typeof process !== "undefined" && !!process.version && !!process.versions && !!process.versions.node; @@ -9427,7 +10322,7 @@ function encodeUri(uri) { * @returns The stripped version of Http Response. */ function stripResponse(response) { - const strippedResponse = {}; + var strippedResponse = {}; strippedResponse.body = response.bodyAsText; strippedResponse.headers = response.headers; strippedResponse.status = response.status; @@ -9441,7 +10336,7 @@ function stripResponse(response) { * @returns The stripped version of Http Request. */ function stripRequest(request) { - const strippedRequest = request.clone(); + var strippedRequest = request.clone(); if (strippedRequest.headers) { strippedRequest.headers.remove("authorization"); } @@ -9474,12 +10369,21 @@ function generateUuid() { * @returns A chain of resolved or rejected promises */ function executePromisesSequentially(promiseFactories, kickstart) { - let result = Promise.resolve(kickstart); - promiseFactories.forEach((promiseFactory) => { + var result = Promise.resolve(kickstart); + promiseFactories.forEach(function (promiseFactory) { result = result.then(promiseFactory); }); return result; } +/** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param t - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @returns Resolved promise + */ +function delay(t, value) { + return new Promise(function (resolve) { return setTimeout(function () { return resolve(value); }, t); }); +} /** * Converts a Promise to a callback. 
* @param promise - The Promise to be converted to a callback @@ -9492,13 +10396,13 @@ function promiseToCallback(promise) { throw new Error("The provided input is not a Promise."); } // eslint-disable-next-line @typescript-eslint/ban-types - return (cb) => { + return function (cb) { promise - .then((data) => { + .then(function (data) { // eslint-disable-next-line promise/no-callback-in-promise return cb(undefined, data); }) - .catch((err) => { + .catch(function (err) { // eslint-disable-next-line promise/no-callback-in-promise cb(err); }); @@ -9513,25 +10417,26 @@ function promiseToServiceCallback(promise) { if (typeof promise.then !== "function") { throw new Error("The provided input is not a Promise."); } - return (cb) => { + return function (cb) { promise - .then((data) => { + .then(function (data) { return process.nextTick(cb, undefined, data.parsedBody, data.request, data); }) - .catch((err) => { + .catch(function (err) { process.nextTick(cb, err); }); }; } function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + var _a, _b, _c; if (!Array.isArray(obj)) { obj = [obj]; } if (!xmlNamespaceKey || !xmlNamespace) { - return { [elementName]: obj }; + return _a = {}, _a[elementName] = obj, _a; } - const result = { [elementName]: obj }; - result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + var result = (_b = {}, _b[elementName] = obj, _b); + result[XML_ATTRKEY] = (_c = {}, _c[xmlNamespaceKey] = xmlNamespace, _c); return result; } /** @@ -9540,14 +10445,14 @@ function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { * @param sourceCtors - An array of source objects from which the properties need to be taken. */ function applyMixins(targetCtorParam, sourceCtors) { - const castTargetCtorParam = targetCtorParam; - sourceCtors.forEach((sourceCtor) => { - Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => { + var castTargetCtorParam = targetCtorParam; + sourceCtors.forEach(function (sourceCtor) { + Object.getOwnPropertyNames(sourceCtor.prototype).forEach(function (name) { castTargetCtorParam.prototype[name] = sourceCtor.prototype[name]; }); }); } -const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +var validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; /** * Indicates whether the given string is in ISO 8601 format. * @param value - The value to be validated for ISO 8601 duration format. @@ -9597,18 +10502,19 @@ function isObject(input) { } // Copyright (c) Microsoft Corporation. 
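The delay helper re-introduced in this hunk pairs naturally with executePromisesSequentially above; a small illustrative sketch (hypothetical factories) of how the chain behaves, with each factory receiving the previous resolved value and starting only once it has settled:

var factories = [
  function (n) { return delay(50, n + 1); },  // waits 50 ms, resolves with n + 1
  function (n) { return delay(50, n * 10); }  // runs only after the first factory settles
];
executePromisesSequentially(factories, 0).then(function (result) {
  // result === 10: Promise.resolve(0) -> 0 + 1 -> 1 * 10
  console.log(result);
});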
-class Serializer { - constructor(modelMappers = {}, isXML) { +var Serializer = /** @class */ (function () { + function Serializer(modelMappers, isXML) { + if (modelMappers === void 0) { modelMappers = {}; } this.modelMappers = modelMappers; this.isXML = isXML; } - validateConstraints(mapper, value, objectName) { - const failValidation = (constraintName, constraintValue) => { - throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + Serializer.prototype.validateConstraints = function (mapper, value, objectName) { + var failValidation = function (constraintName, constraintValue) { + throw new Error("\"" + objectName + "\" with value \"" + value + "\" should satisfy the constraint \"" + constraintName + "\": " + constraintValue + "."); }; if (mapper.constraints && value != undefined) { - const valueAsNumber = value; - const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems } = mapper.constraints; + var valueAsNumber = value; + var _a = mapper.constraints, ExclusiveMaximum = _a.ExclusiveMaximum, ExclusiveMinimum = _a.ExclusiveMinimum, InclusiveMaximum = _a.InclusiveMaximum, InclusiveMinimum = _a.InclusiveMinimum, MaxItems = _a.MaxItems, MaxLength = _a.MaxLength, MinItems = _a.MinItems, MinLength = _a.MinLength, MultipleOf = _a.MultipleOf, Pattern = _a.Pattern, UniqueItems = _a.UniqueItems; if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { failValidation("ExclusiveMaximum", ExclusiveMaximum); } @@ -9621,7 +10527,7 @@ class Serializer { if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) { failValidation("InclusiveMinimum", InclusiveMinimum); } - const valueAsArray = value; + var valueAsArray = value; if (MaxItems != undefined && valueAsArray.length > MaxItems) { failValidation("MaxItems", MaxItems); } @@ -9638,17 +10544,17 @@ class Serializer { failValidation("MultipleOf", MultipleOf); } if (Pattern) { - const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + var pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; if (typeof value !== "string" || value.match(pattern) === null) { failValidation("Pattern", Pattern); } } if (UniqueItems && - valueAsArray.some((item, i, ar) => ar.indexOf(item) !== i)) { + valueAsArray.some(function (item, i, ar) { return ar.indexOf(item) !== i; })) { failValidation("UniqueItems", UniqueItems); } } - } + }; /** * Serialize the given object based on its metadata defined in the mapper * @@ -9658,15 +10564,16 @@ class Serializer { * @param options - additional options to deserialization * @returns A valid serialized Javascript object */ - serialize(mapper, object, objectName, options = {}) { + Serializer.prototype.serialize = function (mapper, object, objectName, options) { var _a, _b, _c; - const updatedOptions = { + if (options === void 0) { options = {}; } + var updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? 
_c : XML_CHARKEY }; - let payload = {}; - const mapperType = mapper.type.name; + var payload = {}; + var mapperType = mapper.type.name; if (!objectName) { objectName = mapper.serializedName; } @@ -9685,15 +10592,15 @@ class Serializer { // true || null | undefined/null // false || X | undefined // undefined || X | undefined/null - const { required, nullable } = mapper; + var required = mapper.required, nullable = mapper.nullable; if (required && nullable && object === undefined) { - throw new Error(`${objectName} cannot be undefined.`); + throw new Error(objectName + " cannot be undefined."); } if (required && !nullable && object == undefined) { - throw new Error(`${objectName} cannot be null or undefined.`); + throw new Error(objectName + " cannot be null or undefined."); } if (!required && nullable === false && object === null) { - throw new Error(`${objectName} cannot be null.`); + throw new Error(objectName + " cannot be null."); } if (object == undefined) { payload = object; @@ -9708,7 +10615,7 @@ class Serializer { payload = serializeBasicTypes(mapperType, objectName, object); } else if (mapperType.match(/^Enum$/i) !== null) { - const enumMapper = mapper; + var enumMapper = mapper; payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); } else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { @@ -9731,7 +10638,7 @@ class Serializer { } } return payload; - } + }; /** * Deserialize the given object based on its metadata defined in the mapper * @@ -9741,9 +10648,10 @@ class Serializer { * @param options - Controls behavior of XML parser and builder. * @returns A valid deserialized Javascript object */ - deserialize(mapper, responseBody, objectName, options = {}) { + Serializer.prototype.deserialize = function (mapper, responseBody, objectName, options) { var _a, _b, _c; - const updatedOptions = { + if (options === void 0) { options = {}; } + var updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY @@ -9761,8 +10669,8 @@ class Serializer { } return responseBody; } - let payload; - const mapperType = mapper.type.name; + var payload; + var mapperType = mapper.type.name; if (!objectName) { objectName = mapper.serializedName; } @@ -9771,8 +10679,8 @@ class Serializer { } else { if (this.isXML) { - const xmlCharKey = updatedOptions.xmlCharKey; - const castResponseBody = responseBody; + var xmlCharKey = updatedOptions.xmlCharKey; + var castResponseBody = responseBody; /** * If the mapper specifies this as a non-composite type value but the responseBody contains * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, @@ -9826,10 +10734,11 @@ class Serializer { payload = mapper.defaultValue; } return payload; - } -} + }; + return Serializer; +}()); function trimEnd(str, ch) { - let len = str.length; + var len = str.length; while (len - 1 >= 0 && str[len - 1] === ch) { --len; } @@ -9840,10 +10749,10 @@ function bufferToBase64Url(buffer) { return undefined; } if (!(buffer instanceof Uint8Array)) { - throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + throw new Error("Please provide an input of type Uint8Array for converting to Base64Url."); } // Uint8Array to Base64. - const str = encodeByteArray(buffer); + var str = encodeByteArray(buffer); // Base64 to Base64Url. 
return trimEnd(str, "=") .replace(/\+/g, "-") @@ -9862,11 +10771,12 @@ function base64UrlToByteArray(str) { return decodeString(str); } function splitSerializeName(prop) { - const classes = []; - let partialclass = ""; + var classes = []; + var partialclass = ""; if (prop) { - const subwords = prop.split("."); - for (const item of subwords) { + var subwords = prop.split("."); + for (var _i = 0, subwords_1 = subwords; _i < subwords_1.length; _i++) { + var item = subwords_1[_i]; if (item.charAt(item.length - 1) === "\\") { partialclass += item.substr(0, item.length - 1) + "."; } @@ -9898,32 +10808,32 @@ function serializeBasicTypes(typeName, objectName, value) { if (value !== null && value !== undefined) { if (typeName.match(/^Number$/i) !== null) { if (typeof value !== "number") { - throw new Error(`${objectName} with value ${value} must be of type number.`); + throw new Error(objectName + " with value " + value + " must be of type number."); } } else if (typeName.match(/^String$/i) !== null) { if (typeof value.valueOf() !== "string") { - throw new Error(`${objectName} with value "${value}" must be of type string.`); + throw new Error(objectName + " with value \"" + value + "\" must be of type string."); } } else if (typeName.match(/^Uuid$/i) !== null) { if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { - throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + throw new Error(objectName + " with value \"" + value + "\" must be of type string and a valid uuid."); } } else if (typeName.match(/^Boolean$/i) !== null) { if (typeof value !== "boolean") { - throw new Error(`${objectName} with value ${value} must be of type boolean.`); + throw new Error(objectName + " with value " + value + " must be of type boolean."); } } else if (typeName.match(/^Stream$/i) !== null) { - const objectType = typeof value; + var objectType = typeof value; if (objectType !== "string" && objectType !== "function" && !(value instanceof ArrayBuffer) && !ArrayBuffer.isView(value) && !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob)) { - throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`); + throw new Error(objectName + " must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream."); } } } @@ -9931,34 +10841,34 @@ function serializeBasicTypes(typeName, objectName, value) { } function serializeEnumType(objectName, allowedValues, value) { if (!allowedValues) { - throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + throw new Error("Please provide a set of allowedValues to validate " + objectName + " as an Enum Type."); } - const isPresent = allowedValues.some((item) => { + var isPresent = allowedValues.some(function (item) { if (typeof item.valueOf() === "string") { return item.toLowerCase() === value.toLowerCase(); } return item === value; }); if (!isPresent) { - throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); + throw new Error(value + " is not a valid value for " + objectName + ". 
The valid values are: " + JSON.stringify(allowedValues) + "."); } return value; } function serializeByteArrayType(objectName, value) { - let returnValue = ""; + var returnValue = ""; if (value != undefined) { if (!(value instanceof Uint8Array)) { - throw new Error(`${objectName} must be of type Uint8Array.`); + throw new Error(objectName + " must be of type Uint8Array."); } returnValue = encodeByteArray(value); } return returnValue; } function serializeBase64UrlType(objectName, value) { - let returnValue = ""; + var returnValue = ""; if (value != undefined) { if (!(value instanceof Uint8Array)) { - throw new Error(`${objectName} must be of type Uint8Array.`); + throw new Error(objectName + " must be of type Uint8Array."); } returnValue = bufferToBase64Url(value) || ""; } @@ -9969,7 +10879,7 @@ function serializeDateTypes(typeName, value, objectName) { if (typeName.match(/^Date$/i) !== null) { if (!(value instanceof Date || (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); } value = value instanceof Date @@ -9979,57 +10889,58 @@ function serializeDateTypes(typeName, value, objectName) { else if (typeName.match(/^DateTime$/i) !== null) { if (!(value instanceof Date || (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); } value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); } else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { if (!(value instanceof Date || (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123 format."); } value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); } else if (typeName.match(/^UnixTime$/i) !== null) { if (!(value instanceof Date || (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + - `for it to be serialized in UnixTime/Epoch format.`); + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123/ISO8601 format " + + "for it to be serialized in UnixTime/Epoch format."); } value = dateToUnixTime(value); } else if (typeName.match(/^TimeSpan$/i) !== null) { if (!isDuration(value)) { - throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + throw new Error(objectName + " must be a string in ISO 8601 format. 
Instead was \"" + value + "\"."); } } } return value; } function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + var _a, _b; if (!Array.isArray(object)) { - throw new Error(`${objectName} must be of type Array.`); + throw new Error(objectName + " must be of type Array."); } - const elementType = mapper.type.element; + var elementType = mapper.type.element; if (!elementType || typeof elementType !== "object") { - throw new Error(`element" metadata for an Array must be defined in the ` + - `mapper and it must of type "object" in ${objectName}.`); + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); } - const tempArray = []; - for (let i = 0; i < object.length; i++) { - const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + var tempArray = []; + for (var i = 0; i < object.length; i++) { + var serializedValue = serializer.serialize(elementType, object[i], objectName, options); if (isXml && elementType.xmlNamespace) { - const xmlnsKey = elementType.xmlNamespacePrefix - ? `xmlns:${elementType.xmlNamespacePrefix}` + var xmlnsKey = elementType.xmlNamespacePrefix + ? "xmlns:" + elementType.xmlNamespacePrefix : "xmlns"; if (elementType.type.name === "Composite") { - tempArray[i] = Object.assign({}, serializedValue); - tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + tempArray[i] = tslib.__assign({}, serializedValue); + tempArray[i][XML_ATTRKEY] = (_a = {}, _a[xmlnsKey] = elementType.xmlNamespace, _a); } else { tempArray[i] = {}; tempArray[i][options.xmlCharKey] = serializedValue; - tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + tempArray[i][XML_ATTRKEY] = (_b = {}, _b[xmlnsKey] = elementType.xmlNamespace, _b); } } else { @@ -10039,25 +10950,27 @@ function serializeSequenceType(serializer, mapper, object, objectName, isXml, op return tempArray; } function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + var _a; if (typeof object !== "object") { - throw new Error(`${objectName} must be of type object.`); + throw new Error(objectName + " must be of type object."); } - const valueType = mapper.type.value; + var valueType = mapper.type.value; if (!valueType || typeof valueType !== "object") { - throw new Error(`"value" metadata for a Dictionary must be defined in the ` + - `mapper and it must of type "object" in ${objectName}.`); + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); } - const tempDictionary = {}; - for (const key of Object.keys(object)) { - const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + var tempDictionary = {}; + for (var _i = 0, _b = Object.keys(object); _i < _b.length; _i++) { + var key = _b[_i]; + var serializedValue = serializer.serialize(valueType, object[key], objectName, options); // If the element needs an XML namespace we need to add it within the $ property tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); } // Add the namespace to the root element if needed if (isXml && mapper.xmlNamespace) { - const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; - const result = tempDictionary; - result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + var xmlnsKey = mapper.xmlNamespacePrefix ? 
"xmlns:" + mapper.xmlNamespacePrefix : "xmlns"; + var result = tempDictionary; + result[XML_ATTRKEY] = (_a = {}, _a[xmlnsKey] = mapper.xmlNamespace, _a); return result; } return tempDictionary; @@ -10069,9 +10982,9 @@ function serializeDictionaryType(serializer, mapper, object, objectName, isXml, * @param objectName - Name of the object being serialized */ function resolveAdditionalProperties(serializer, mapper, objectName) { - const additionalProperties = mapper.type.additionalProperties; + var additionalProperties = mapper.type.additionalProperties; if (!additionalProperties && mapper.type.className) { - const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + var modelMapper = resolveReferencedMapper(serializer, mapper, objectName); return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; } return additionalProperties; @@ -10083,9 +10996,9 @@ function resolveAdditionalProperties(serializer, mapper, objectName) { * @param objectName - Name of the object being serialized */ function resolveReferencedMapper(serializer, mapper, objectName) { - const className = mapper.type.className; + var className = mapper.type.className; if (!className) { - throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); + throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); } return serializer.modelMappers[className]; } @@ -10095,34 +11008,36 @@ function resolveReferencedMapper(serializer, mapper, objectName) { * @param mapper - The composite mapper to resolve */ function resolveModelProperties(serializer, mapper, objectName) { - let modelProps = mapper.type.modelProperties; + var modelProps = mapper.type.modelProperties; if (!modelProps) { - const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + var modelMapper = resolveReferencedMapper(serializer, mapper, objectName); if (!modelMapper) { - throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); + throw new Error("mapper() cannot be null or undefined for model \"" + mapper.type.className + "\"."); } modelProps = modelMapper === null || modelMapper === void 0 ? 
void 0 : modelMapper.type.modelProperties; if (!modelProps) { - throw new Error(`modelProperties cannot be null or undefined in the ` + - `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + throw new Error("modelProperties cannot be null or undefined in the " + + ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + mapper.type.className + "\" for object \"" + objectName + "\".")); } } return modelProps; } function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + var _a, _b; if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); } if (object != undefined) { - const payload = {}; - const modelProps = resolveModelProperties(serializer, mapper, objectName); - for (const key of Object.keys(modelProps)) { - const propertyMapper = modelProps[key]; + var payload = {}; + var modelProps = resolveModelProperties(serializer, mapper, objectName); + for (var _i = 0, _c = Object.keys(modelProps); _i < _c.length; _i++) { + var key = _c[_i]; + var propertyMapper = modelProps[key]; if (propertyMapper.readOnly) { continue; } - let propName; - let parentObject = payload; + var propName = void 0; + var parentObject = payload; if (serializer.isXML) { if (propertyMapper.xmlIsWrapped) { propName = propertyMapper.xmlName; @@ -10132,10 +11047,11 @@ function serializeCompositeType(serializer, mapper, object, objectName, isXml, o } } else { - const paths = splitSerializeName(propertyMapper.serializedName); + var paths = splitSerializeName(propertyMapper.serializedName); propName = paths.pop(); - for (const pathName of paths) { - const childObject = parentObject[pathName]; + for (var _d = 0, paths_1 = paths; _d < paths_1.length; _d++) { + var pathName = paths_1[_d]; + var childObject = parentObject[pathName]; if (childObject == undefined && (object[key] != undefined || propertyMapper.defaultValue !== undefined)) { parentObject[pathName] = {}; @@ -10145,24 +11061,24 @@ function serializeCompositeType(serializer, mapper, object, objectName, isXml, o } if (parentObject != undefined) { if (isXml && mapper.xmlNamespace) { - const xmlnsKey = mapper.xmlNamespacePrefix - ? `xmlns:${mapper.xmlNamespacePrefix}` + var xmlnsKey = mapper.xmlNamespacePrefix + ? "xmlns:" + mapper.xmlNamespacePrefix : "xmlns"; - parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); + parentObject[XML_ATTRKEY] = tslib.__assign(tslib.__assign({}, parentObject[XML_ATTRKEY]), (_a = {}, _a[xmlnsKey] = mapper.xmlNamespace, _a)); } - const propertyObjectName = propertyMapper.serializedName !== "" + var propertyObjectName = propertyMapper.serializedName !== "" ? objectName + "." 
+ propertyMapper.serializedName : objectName; - let toSerialize = object[key]; - const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + var toSerialize = object[key]; + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); if (polymorphicDiscriminator && polymorphicDiscriminator.clientName === key && toSerialize == undefined) { toSerialize = mapper.serializedName; } - const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); if (serializedValue !== undefined && propName != undefined) { - const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + var value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); if (isXml && propertyMapper.xmlIsAttribute) { // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. // This keeps things simple while preventing name collision @@ -10171,7 +11087,7 @@ function serializeCompositeType(serializer, mapper, object, objectName, isXml, o parentObject[XML_ATTRKEY][propName] = serializedValue; } else if (isXml && propertyMapper.xmlIsWrapped) { - parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + parentObject[propName] = (_b = {}, _b[propertyMapper.xmlElementName] = value, _b); } else { parentObject[propName] = value; @@ -10179,14 +11095,17 @@ function serializeCompositeType(serializer, mapper, object, objectName, isXml, o } } } - const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + var additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); if (additionalPropertiesMapper) { - const propNames = Object.keys(modelProps); - for (const clientPropName in object) { - const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + var propNames = Object.keys(modelProps); + var _loop_1 = function (clientPropName) { + var isAdditionalProperty = propNames.every(function (pn) { return pn !== clientPropName; }); if (isAdditionalProperty) { payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); } + }; + for (var clientPropName in object) { + _loop_1(clientPropName); } } return payload; @@ -10194,24 +11113,25 @@ function serializeCompositeType(serializer, mapper, object, objectName, isXml, o return object; } function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + var _a; if (!isXml || !propertyMapper.xmlNamespace) { return serializedValue; } - const xmlnsKey = propertyMapper.xmlNamespacePrefix - ? `xmlns:${propertyMapper.xmlNamespacePrefix}` + var xmlnsKey = propertyMapper.xmlNamespacePrefix + ? 
"xmlns:" + propertyMapper.xmlNamespacePrefix : "xmlns"; - const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + var xmlNamespace = (_a = {}, _a[xmlnsKey] = propertyMapper.xmlNamespace, _a); if (["Composite"].includes(propertyMapper.type.name)) { if (serializedValue[XML_ATTRKEY]) { return serializedValue; } else { - const result = Object.assign({}, serializedValue); - result[XML_ATTRKEY] = xmlNamespace; - return result; + var result_1 = tslib.__assign({}, serializedValue); + result_1[XML_ATTRKEY] = xmlNamespace; + return result_1; } } - const result = {}; + var result = {}; result[options.xmlCharKey] = serializedValue; result[XML_ATTRKEY] = xmlNamespace; return result; @@ -10224,22 +11144,24 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName, if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); } - const modelProps = resolveModelProperties(serializer, mapper, objectName); - let instance = {}; - const handledPropertyNames = []; - for (const key of Object.keys(modelProps)) { - const propertyMapper = modelProps[key]; - const paths = splitSerializeName(modelProps[key].serializedName); + var modelProps = resolveModelProperties(serializer, mapper, objectName); + var instance = {}; + var handledPropertyNames = []; + for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { + var key = _b[_i]; + var propertyMapper = modelProps[key]; + var paths = splitSerializeName(modelProps[key].serializedName); handledPropertyNames.push(paths[0]); - const { serializedName, xmlName, xmlElementName } = propertyMapper; - let propertyObjectName = objectName; + var serializedName = propertyMapper.serializedName, xmlName = propertyMapper.xmlName, xmlElementName = propertyMapper.xmlElementName; + var propertyObjectName = objectName; if (serializedName !== "" && serializedName !== undefined) { propertyObjectName = objectName + "." + serializedName; } - const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + var headerCollectionPrefix = propertyMapper.headerCollectionPrefix; if (headerCollectionPrefix) { - const dictionary = {}; - for (const headerKey of Object.keys(responseBody)) { + var dictionary = {}; + for (var _c = 0, _d = Object.keys(responseBody); _c < _d.length; _c++) { + var headerKey = _d[_c]; if (headerKey.startsWith(headerCollectionPrefix)) { dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); } @@ -10252,7 +11174,7 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName, instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); } else { - const propertyName = xmlElementName || xmlName || serializedName; + var propertyName = xmlElementName || xmlName || serializedName; if (propertyMapper.xmlIsWrapped) { /* a list of wrapped by For the xml example below @@ -10268,28 +11190,29 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName, } xmlName is "Cors" and xmlElementName is"CorsRule". */ - const wrapped = responseBody[xmlName]; - const elementList = (_a = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _a !== void 0 ? _a : []; + var wrapped = responseBody[xmlName]; + var elementList = (_a = wrapped === null || wrapped === void 0 ? 
void 0 : wrapped[xmlElementName]) !== null && _a !== void 0 ? _a : []; instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); } else { - const property = responseBody[propertyName]; + var property = responseBody[propertyName]; instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); } } } else { // deserialize the property if it is present in the provided responseBody instance - let propertyInstance; - let res = responseBody; + var propertyInstance = void 0; + var res = responseBody; // traversing the object step by step. - for (const item of paths) { + for (var _e = 0, paths_2 = paths; _e < paths_2.length; _e++) { + var item = paths_2[_e]; if (!res) break; res = res[item]; } propertyInstance = res; - const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + var polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; // checking that the model property name (key)(ex: "fishtype") and the // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") @@ -10304,14 +11227,15 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName, propertyInstance == undefined) { propertyInstance = mapper.serializedName; } - let serializedValue; + var serializedValue = void 0; // paging if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { propertyInstance = responseBody[key]; - const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + var arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); // Copy over any properties that have already been added into the instance, where they do // not exist on the newly de-serialized array - for (const [k, v] of Object.entries(instance)) { + for (var _f = 0, _g = Object.entries(instance); _f < _g.length; _f++) { + var _h = _g[_f], k = _h[0], v = _h[1]; if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { arrayInstance[k] = v; } @@ -10324,25 +11248,26 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName, } } } - const additionalPropertiesMapper = mapper.type.additionalProperties; + var additionalPropertiesMapper = mapper.type.additionalProperties; if (additionalPropertiesMapper) { - const isAdditionalProperty = (responsePropName) => { - for (const clientPropName in modelProps) { - const paths = splitSerializeName(modelProps[clientPropName].serializedName); + var isAdditionalProperty = function (responsePropName) { + for (var clientPropName in modelProps) { + var paths = splitSerializeName(modelProps[clientPropName].serializedName); if (paths[0] === responsePropName) { return false; } } return true; }; - for (const responsePropName in responseBody) { + for (var responsePropName in responseBody) { if (isAdditionalProperty(responsePropName)) { instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); } } } else if (responseBody) { - for (const key of Object.keys(responseBody)) { + for (var _j = 0, _k = Object.keys(responseBody); _j < _k.length; _j++) { + var key = _k[_j]; if (instance[key] === undefined && !handledPropertyNames.includes(key) && !isSpecialXmlProperty(key, options)) { @@ -10353,14 +11278,15 @@ function deserializeCompositeType(serializer, mapper, responseBody, 
objectName, return instance; } function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { - const value = mapper.type.value; + var value = mapper.type.value; if (!value || typeof value !== "object") { - throw new Error(`"value" metadata for a Dictionary must be defined in the ` + - `mapper and it must of type "object" in ${objectName}`); + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); } if (responseBody) { - const tempDictionary = {}; - for (const key of Object.keys(responseBody)) { + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(responseBody); _i < _a.length; _i++) { + var key = _a[_i]; tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); } return tempDictionary; @@ -10368,36 +11294,36 @@ function deserializeDictionaryType(serializer, mapper, responseBody, objectName, return responseBody; } function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { - const element = mapper.type.element; + var element = mapper.type.element; if (!element || typeof element !== "object") { - throw new Error(`element" metadata for an Array must be defined in the ` + - `mapper and it must of type "object" in ${objectName}`); + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); } if (responseBody) { if (!Array.isArray(responseBody)) { // xml2js will interpret a single element array as just the element, so force it to be an array responseBody = [responseBody]; } - const tempArray = []; - for (let i = 0; i < responseBody.length; i++) { - tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + var tempArray = []; + for (var i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], objectName + "[" + i + "]", options); } return tempArray; } return responseBody; } function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { - const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); if (polymorphicDiscriminator) { - const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + var discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; if (discriminatorName != undefined) { - const discriminatorValue = object[discriminatorName]; + var discriminatorValue = object[discriminatorName]; if (discriminatorValue != undefined) { - const typeName = mapper.type.uberParent || mapper.type.className; - const indexDiscriminator = discriminatorValue === typeName + var typeName = mapper.type.uberParent || mapper.type.className; + var indexDiscriminator = discriminatorValue === typeName ? discriminatorValue : typeName + "." + discriminatorValue; - const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + var polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; if (polymorphicMapper) { mapper = polymorphicMapper; } @@ -10418,7 +11344,7 @@ function getPolymorphicDiscriminatorSafely(serializer, typeName) { } // TODO: why is this here? 
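A hedged sketch of how the Serializer defined earlier in this file is typically driven (hypothetical mapper and values; real mappers are much richer and usually generated). The required/nullable and constraint checks run before the type name on the mapper selects the serialization branch:

var serializer = new Serializer();
var nameMapper = {
  serializedName: "name",
  required: true,
  constraints: { MaxLength: 16 },
  type: { name: "String" }
};
serializer.serialize(nameMapper, "setup-java", "name");
// -> "setup-java" (the String branch validates the type and passes the value through)
serializer.serialize(nameMapper, undefined, "name");
// -> throws: name cannot be null or undefined.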
function serializeObject(toSerialize) { - const castToSerialize = toSerialize; + var castToSerialize = toSerialize; if (toSerialize == undefined) return undefined; if (toSerialize instanceof Uint8Array) { @@ -10429,15 +11355,15 @@ function serializeObject(toSerialize) { return toSerialize.toISOString(); } else if (Array.isArray(toSerialize)) { - const array = []; - for (let i = 0; i < toSerialize.length; i++) { + var array = []; + for (var i = 0; i < toSerialize.length; i++) { array.push(serializeObject(toSerialize[i])); } return array; } else if (typeof toSerialize === "object") { - const dictionary = {}; - for (const property in toSerialize) { + var dictionary = {}; + for (var property in toSerialize) { dictionary[property] = serializeObject(castToSerialize[property]); } return dictionary; @@ -10448,14 +11374,15 @@ function serializeObject(toSerialize) { * Utility function to create a K:V from a list of strings */ function strEnum(o) { - const result = {}; - for (const key of o) { + var result = {}; + for (var _i = 0, o_1 = o; _i < o_1.length; _i++) { + var key = o_1[_i]; result[key] = key; } return result; } // eslint-disable-next-line @typescript-eslint/no-redeclare -const MapperType = strEnum([ +var MapperType = strEnum([ "Base64Url", "Boolean", "ByteArray", @@ -10477,7 +11404,7 @@ const MapperType = strEnum([ // Copyright (c) Microsoft Corporation. function isWebResourceLike(object) { if (object && typeof object === "object") { - const castObject = object; + var castObject = object; if (typeof castObject.url === "string" && typeof castObject.method === "string" && typeof castObject.headers === "object" && @@ -10496,8 +11423,8 @@ function isWebResourceLike(object) { * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary * properties to initiate a request. */ -class WebResource { - constructor(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) { +var WebResource = /** @class */ (function () { + function WebResource(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) { this.streamResponseBody = streamResponseBody; this.streamResponseStatusCodes = streamResponseStatusCodes; this.url = url || ""; @@ -10521,20 +11448,20 @@ class WebResource { * headers["accept-language"] are defined. It will throw an error if one of the above * mentioned properties are not defined. */ - validateRequestProperties() { + WebResource.prototype.validateRequestProperties = function () { if (!this.method) { throw new Error("WebResource.method is required."); } if (!this.url) { throw new Error("WebResource.url is required."); } - } + }; /** * Prepares the request. * @param options - Options to provide for preparing the request. * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. 
*/ - prepare(options) { + WebResource.prototype.prepare = function (options) { if (!options) { throw new Error("options object is required"); } @@ -10563,7 +11490,7 @@ class WebResource { } // set the method if (options.method) { - const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + var validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; if (validMethods.indexOf(options.method.toUpperCase()) === -1) { throw new Error('The provided method "' + options.method + @@ -10574,70 +11501,70 @@ class WebResource { this.method = options.method.toUpperCase(); // construct the url if path template is provided if (options.pathTemplate) { - const { pathTemplate, pathParameters } = options; - if (typeof pathTemplate !== "string") { + var pathTemplate_1 = options.pathTemplate, pathParameters_1 = options.pathParameters; + if (typeof pathTemplate_1 !== "string") { throw new Error('options.pathTemplate must be of type "string".'); } if (!options.baseUrl) { options.baseUrl = "https://management.azure.com"; } - const baseUrl = options.baseUrl; - let url = baseUrl + + var baseUrl = options.baseUrl; + var url_1 = baseUrl + (baseUrl.endsWith("/") ? "" : "/") + - (pathTemplate.startsWith("/") ? pathTemplate.slice(1) : pathTemplate); - const segments = url.match(/({[\w-]*\s*[\w-]*})/gi); + (pathTemplate_1.startsWith("/") ? pathTemplate_1.slice(1) : pathTemplate_1); + var segments = url_1.match(/({[\w-]*\s*[\w-]*})/gi); if (segments && segments.length) { - if (!pathParameters) { - throw new Error(`pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`); + if (!pathParameters_1) { + throw new Error("pathTemplate: " + pathTemplate_1 + " has been provided. Hence, options.pathParameters must also be provided."); } segments.forEach(function (item) { - const pathParamName = item.slice(1, -1); - const pathParam = pathParameters[pathParamName]; + var pathParamName = item.slice(1, -1); + var pathParam = pathParameters_1[pathParamName]; if (pathParam === null || pathParam === undefined || !(typeof pathParam === "string" || typeof pathParam === "object")) { - const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2); - throw new Error(`pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + - ` however, it is not present in parameters: ${stringifiedPathParameters}.` + - `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + - `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.`); + var stringifiedPathParameters = JSON.stringify(pathParameters_1, undefined, 2); + throw new Error("pathTemplate: " + pathTemplate_1 + " contains the path parameter " + pathParamName + + (" however, it is not present in parameters: " + stringifiedPathParameters + ".") + + ("The value of the path parameter can either be a \"string\" of the form { " + pathParamName + ": \"some sample value\" } or ") + + ("it can be an \"object\" of the form { \"" + pathParamName + "\": { value: \"some sample value\", skipUrlEncoding: true } }.")); } if (typeof pathParam.valueOf() === "string") { - url = url.replace(item, encodeURIComponent(pathParam)); + url_1 = url_1.replace(item, encodeURIComponent(pathParam)); } if (typeof pathParam.valueOf() === "object") { if (!pathParam.value) { - throw new Error(`options.pathParameters[${pathParamName}] is of type "object" but it does not 
contain a "value" property.`); + throw new Error("options.pathParameters[" + pathParamName + "] is of type \"object\" but it does not contain a \"value\" property."); } if (pathParam.skipUrlEncoding) { - url = url.replace(item, pathParam.value); + url_1 = url_1.replace(item, pathParam.value); } else { - url = url.replace(item, encodeURIComponent(pathParam.value)); + url_1 = url_1.replace(item, encodeURIComponent(pathParam.value)); } } }); } - this.url = url; + this.url = url_1; } // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. if (options.queryParameters) { - const queryParameters = options.queryParameters; + var queryParameters = options.queryParameters; if (typeof queryParameters !== "object") { - throw new Error(`options.queryParameters must be of type object. It should be a JSON object ` + - `of "query-parameter-name" as the key and the "query-parameter-value" as the value. ` + - `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.`); + throw new Error("options.queryParameters must be of type object. It should be a JSON object " + + "of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. " + + "The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }."); } // append question mark if it is not present in the url if (this.url && this.url.indexOf("?") === -1) { this.url += "?"; } // construct queryString - const queryParams = []; + var queryParams = []; // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). this.query = {}; - for (const queryParamName in queryParameters) { - const queryParam = queryParameters[queryParamName]; + for (var queryParamName in queryParameters) { + var queryParam = queryParameters[queryParamName]; if (queryParam) { if (typeof queryParam === "string") { queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); @@ -10645,7 +11572,7 @@ class WebResource { } else if (typeof queryParam === "object") { if (!queryParam.value) { - throw new Error(`options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.`); + throw new Error("options.queryParameters[" + queryParamName + "] is of type \"object\" but it does not contain a \"value\" property."); } if (queryParam.skipUrlEncoding) { queryParams.push(queryParamName + "=" + queryParam.value); @@ -10663,8 +11590,9 @@ class WebResource { } // add headers to the request if they are provided if (options.headers) { - const headers = options.headers; - for (const headerName of Object.keys(options.headers)) { + var headers = options.headers; + for (var _i = 0, _a = Object.keys(options.headers); _i < _a.length; _i++) { + var headerName = _a[_i]; this.headers.set(headerName, headers[headerName]); } } @@ -10711,13 +11639,13 @@ class WebResource { this.onDownloadProgress = options.onDownloadProgress; this.onUploadProgress = options.onUploadProgress; return this; - } + }; /** * Clone this WebResource HTTP request object. * @returns The clone of this WebResource HTTP request object. 
*/ - clone() { - const result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes); + WebResource.prototype.clone = function () { + var result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes); if (this.formData) { result.formData = this.formData; } @@ -10731,42 +11659,43 @@ class WebResource { result.operationResponseGetter = this.operationResponseGetter; } return result; - } -} + }; + return WebResource; +}()); // Copyright (c) Microsoft Corporation. -const custom = util.inspect.custom; +var custom = util.inspect.custom; // Copyright (c) Microsoft Corporation. /** * A class that handles the query portion of a URLBuilder. */ -class URLQuery { - constructor() { +var URLQuery = /** @class */ (function () { + function URLQuery() { this._rawQuery = {}; } /** * Get whether or not there any query parameters in this URLQuery. */ - any() { + URLQuery.prototype.any = function () { return Object.keys(this._rawQuery).length > 0; - } + }; /** * Get the keys of the query string. */ - keys() { + URLQuery.prototype.keys = function () { return Object.keys(this._rawQuery); - } + }; /** * Set a query parameter with the provided name and value. If the parameterValue is undefined or * empty, then this will attempt to remove an existing query parameter with the provided * parameterName. */ - set(parameterName, parameterValue) { - const caseParameterValue = parameterValue; + URLQuery.prototype.set = function (parameterName, parameterValue) { + var caseParameterValue = parameterValue; if (parameterName) { if (caseParameterValue !== undefined && caseParameterValue !== null) { - const newValue = Array.isArray(caseParameterValue) + var newValue = Array.isArray(caseParameterValue) ? caseParameterValue : caseParameterValue.toString(); this._rawQuery[parameterName] = newValue; @@ -10775,51 +11704,52 @@ class URLQuery { delete this._rawQuery[parameterName]; } } - } + }; /** * Get the value of the query parameter with the provided name. If no parameter exists with the * provided parameter name, then undefined will be returned. */ - get(parameterName) { + URLQuery.prototype.get = function (parameterName) { return parameterName ? this._rawQuery[parameterName] : undefined; - } + }; /** * Get the string representation of this query. The return value will not start with a "?". 
*/ - toString() { - let result = ""; - for (const parameterName in this._rawQuery) { + URLQuery.prototype.toString = function () { + var result = ""; + for (var parameterName in this._rawQuery) { if (result) { result += "&"; } - const parameterValue = this._rawQuery[parameterName]; + var parameterValue = this._rawQuery[parameterName]; if (Array.isArray(parameterValue)) { - const parameterStrings = []; - for (const parameterValueElement of parameterValue) { - parameterStrings.push(`${parameterName}=${parameterValueElement}`); + var parameterStrings = []; + for (var _i = 0, parameterValue_1 = parameterValue; _i < parameterValue_1.length; _i++) { + var parameterValueElement = parameterValue_1[_i]; + parameterStrings.push(parameterName + "=" + parameterValueElement); } result += parameterStrings.join("&"); } else { - result += `${parameterName}=${parameterValue}`; + result += parameterName + "=" + parameterValue; } } return result; - } + }; /** * Parse a URLQuery from the provided text. */ - static parse(text) { - const result = new URLQuery(); + URLQuery.parse = function (text) { + var result = new URLQuery(); if (text) { if (text.startsWith("?")) { text = text.substring(1); } - let currentState = "ParameterName"; - let parameterName = ""; - let parameterValue = ""; - for (let i = 0; i < text.length; ++i) { - const currentCharacter = text[i]; + var currentState = "ParameterName"; + var parameterName = ""; + var parameterValue = ""; + for (var i = 0; i < text.length; ++i) { + var currentCharacter = text[i]; switch (currentState) { case "ParameterName": switch (currentCharacter) { @@ -10857,78 +11787,81 @@ class URLQuery { } } return result; - } -} + }; + return URLQuery; +}()); /** * A class that handles creating, modifying, and parsing URLs. */ -class URLBuilder { +var URLBuilder = /** @class */ (function () { + function URLBuilder() { + } /** * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL * (such as a host, port, path, or query), those parts will be added to this URL as well. */ - setScheme(scheme) { + URLBuilder.prototype.setScheme = function (scheme) { if (!scheme) { this._scheme = undefined; } else { this.set(scheme, "SCHEME"); } - } + }; /** * Get the scheme that has been set in this URL. */ - getScheme() { + URLBuilder.prototype.getScheme = function () { return this._scheme; - } + }; /** * Set the host for this URL. If the provided host contains other parts of a URL (such as a * port, path, or query), those parts will be added to this URL as well. */ - setHost(host) { + URLBuilder.prototype.setHost = function (host) { if (!host) { this._host = undefined; } else { this.set(host, "SCHEME_OR_HOST"); } - } + }; /** * Get the host that has been set in this URL. */ - getHost() { + URLBuilder.prototype.getHost = function () { return this._host; - } + }; /** * Set the port for this URL. If the provided port contains other parts of a URL (such as a * path or query), those parts will be added to this URL as well. */ - setPort(port) { + URLBuilder.prototype.setPort = function (port) { if (port === undefined || port === null || port === "") { this._port = undefined; } else { this.set(port.toString(), "PORT"); } - } + }; /** * Get the port that has been set in this URL. */ - getPort() { + URLBuilder.prototype.getPort = function () { return this._port; - } + }; /** * Set the path for this URL. If the provided path contains a query, then it will be added to * this URL as well. 
*/ - setPath(path) { + URLBuilder.prototype.setPath = function (path) { if (!path) { this._path = undefined; } else { - const schemeIndex = path.indexOf("://"); + var schemeIndex = path.indexOf("://"); if (schemeIndex !== -1) { - const schemeStart = path.lastIndexOf("/", schemeIndex); + var schemeStart = path.lastIndexOf("/", schemeIndex); // Make sure to only grab the URL part of the path before setting the state back to SCHEME // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); @@ -10937,14 +11870,14 @@ class URLBuilder { this.set(path, "PATH"); } } - } + }; /** * Append the provided path to this URL's existing path. If the provided path contains a query, * then it will be added to this URL as well. */ - appendPath(path) { + URLBuilder.prototype.appendPath = function (path) { if (path) { - let currentPath = this.getPath(); + var currentPath = this.getPath(); if (currentPath) { if (!currentPath.endsWith("/")) { currentPath += "/"; @@ -10956,58 +11889,58 @@ class URLBuilder { } this.set(path, "PATH"); } - } + }; /** * Get the path that has been set in this URL. */ - getPath() { + URLBuilder.prototype.getPath = function () { return this._path; - } + }; /** * Set the query in this URL. */ - setQuery(query) { + URLBuilder.prototype.setQuery = function (query) { if (!query) { this._query = undefined; } else { this._query = URLQuery.parse(query); } - } + }; /** * Set a query parameter with the provided name and value in this URL's query. If the provided * query parameter value is undefined or empty, then the query parameter will be removed if it * existed. */ - setQueryParameter(queryParameterName, queryParameterValue) { + URLBuilder.prototype.setQueryParameter = function (queryParameterName, queryParameterValue) { if (queryParameterName) { if (!this._query) { this._query = new URLQuery(); } this._query.set(queryParameterName, queryParameterValue); } - } + }; /** * Get the value of the query parameter with the provided query parameter name. If no query * parameter exists with the provided name, then undefined will be returned. */ - getQueryParameterValue(queryParameterName) { + URLBuilder.prototype.getQueryParameterValue = function (queryParameterName) { return this._query ? this._query.get(queryParameterName) : undefined; - } + }; /** * Get the query in this URL. */ - getQuery() { + URLBuilder.prototype.getQuery = function () { return this._query ? this._query.toString() : undefined; - } + }; /** * Set the parts of this URL by parsing the provided text using the provided startState. 
*/ - set(text, startState) { - const tokenizer = new URLTokenizer(text, startState); + URLBuilder.prototype.set = function (text, startState) { + var tokenizer = new URLTokenizer(text, startState); while (tokenizer.next()) { - const token = tokenizer.current(); - let tokenPath; + var token = tokenizer.current(); + var tokenPath = void 0; if (token) { switch (token.type) { case "SCHEME": @@ -11029,21 +11962,21 @@ class URLBuilder { this._query = URLQuery.parse(token.text); break; default: - throw new Error(`Unrecognized URLTokenType: ${token.type}`); + throw new Error("Unrecognized URLTokenType: " + token.type); } } } - } - toString() { - let result = ""; + }; + URLBuilder.prototype.toString = function () { + var result = ""; if (this._scheme) { - result += `${this._scheme}://`; + result += this._scheme + "://"; } if (this._host) { result += this._host; } if (this._port) { - result += `:${this._port}`; + result += ":" + this._port; } if (this._path) { if (!this._path.startsWith("/")) { @@ -11052,15 +11985,15 @@ class URLBuilder { result += this._path; } if (this._query && this._query.any()) { - result += `?${this._query.toString()}`; + result += "?" + this._query.toString(); } return result; - } + }; /** * If the provided searchValue is found in this URLBuilder, then replace it with the provided * replaceValue. */ - replaceAll(searchValue, replaceValue) { + URLBuilder.prototype.replaceAll = function (searchValue, replaceValue) { if (searchValue) { this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); @@ -11068,40 +12001,42 @@ class URLBuilder { this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); } - } - static parse(text) { - const result = new URLBuilder(); + }; + URLBuilder.parse = function (text) { + var result = new URLBuilder(); result.set(text, "SCHEME_OR_HOST"); return result; - } -} -class URLToken { - constructor(text, type) { + }; + return URLBuilder; +}()); +var URLToken = /** @class */ (function () { + function URLToken(text, type) { this.text = text; this.type = type; } - static scheme(text) { + URLToken.scheme = function (text) { return new URLToken(text, "SCHEME"); - } - static host(text) { + }; + URLToken.host = function (text) { return new URLToken(text, "HOST"); - } - static port(text) { + }; + URLToken.port = function (text) { return new URLToken(text, "PORT"); - } - static path(text) { + }; + URLToken.path = function (text) { return new URLToken(text, "PATH"); - } - static query(text) { + }; + URLToken.query = function (text) { return new URLToken(text, "QUERY"); - } -} + }; + return URLToken; +}()); /** * Get whether or not the provided character (single character string) is an alphanumeric (letter or * digit) character. */ function isAlphaNumericCharacter(character) { - const characterCode = character.charCodeAt(0); + var characterCode = character.charCodeAt(0); return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); @@ -11109,8 +12044,8 @@ function isAlphaNumericCharacter(character) { /** * A class that tokenizes URL strings. */ -class URLTokenizer { - constructor(_text, state) { +var URLTokenizer = /** @class */ (function () { + function URLTokenizer(_text, state) { this._text = _text; this._textLength = _text ? 
_text.length : 0; this._currentState = state !== undefined && state !== null ? state : "SCHEME_OR_HOST"; @@ -11120,13 +12055,13 @@ class URLTokenizer { * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer * hasn't started or has finished tokenizing. */ - current() { + URLTokenizer.prototype.current = function () { return this._currentToken; - } + }; /** * Advance to the next URLToken and return whether or not a URLToken was found. */ - next() { + URLTokenizer.prototype.next = function () { if (!hasCurrentCharacter(this)) { this._currentToken = undefined; } @@ -11151,17 +12086,18 @@ class URLTokenizer { nextQuery(this); break; default: - throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); + throw new Error("Unrecognized URLTokenizerState: " + this._currentState); } } return !!this._currentToken; - } -} + }; + return URLTokenizer; +}()); /** * Read the remaining characters from this Tokenizer's character stream. */ function readRemaining(tokenizer) { - let result = ""; + var result = ""; if (tokenizer._currentIndex < tokenizer._textLength) { result = tokenizer._text.substring(tokenizer._currentIndex); tokenizer._currentIndex = tokenizer._textLength; @@ -11197,7 +12133,7 @@ function nextCharacter(tokenizer, step) { * Tokenizer's stream of characters. */ function peekCharacters(tokenizer, charactersToPeek) { - let endIndex = tokenizer._currentIndex + charactersToPeek; + var endIndex = tokenizer._currentIndex + charactersToPeek; if (tokenizer._textLength < endIndex) { endIndex = tokenizer._textLength; } @@ -11208,9 +12144,9 @@ function peekCharacters(tokenizer, charactersToPeek) { * is false when provided the current character. */ function readWhile(tokenizer, condition) { - let result = ""; + var result = ""; while (hasCurrentCharacter(tokenizer)) { - const currentCharacter = getCurrentCharacter(tokenizer); + var currentCharacter = getCurrentCharacter(tokenizer); if (!condition(currentCharacter)) { break; } @@ -11226,17 +12162,21 @@ function readWhile(tokenizer, condition) { * character stream is reached. */ function readWhileLetterOrDigit(tokenizer) { - return readWhile(tokenizer, (character) => isAlphaNumericCharacter(character)); + return readWhile(tokenizer, function (character) { return isAlphaNumericCharacter(character); }); } /** * Read characters from this Tokenizer until one of the provided terminating characters is read or * the end of the character stream is reached. 
*/ -function readUntilCharacter(tokenizer, ...terminatingCharacters) { - return readWhile(tokenizer, (character) => terminatingCharacters.indexOf(character) === -1); +function readUntilCharacter(tokenizer) { + var terminatingCharacters = []; + for (var _i = 1; _i < arguments.length; _i++) { + terminatingCharacters[_i - 1] = arguments[_i]; + } + return readWhile(tokenizer, function (character) { return terminatingCharacters.indexOf(character) === -1; }); } function nextScheme(tokenizer) { - const scheme = readWhileLetterOrDigit(tokenizer); + var scheme = readWhileLetterOrDigit(tokenizer); tokenizer._currentToken = URLToken.scheme(scheme); if (!hasCurrentCharacter(tokenizer)) { tokenizer._currentState = "DONE"; @@ -11246,7 +12186,7 @@ function nextScheme(tokenizer) { } } function nextSchemeOrHost(tokenizer) { - const schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + var schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); if (!hasCurrentCharacter(tokenizer)) { tokenizer._currentToken = URLToken.host(schemeOrHost); tokenizer._currentState = "DONE"; @@ -11275,7 +12215,7 @@ function nextHost(tokenizer) { if (peekCharacters(tokenizer, 3) === "://") { nextCharacter(tokenizer, 3); } - const host = readUntilCharacter(tokenizer, ":", "/", "?"); + var host = readUntilCharacter(tokenizer, ":", "/", "?"); tokenizer._currentToken = URLToken.host(host); if (!hasCurrentCharacter(tokenizer)) { tokenizer._currentState = "DONE"; @@ -11294,7 +12234,7 @@ function nextPort(tokenizer) { if (getCurrentCharacter(tokenizer) === ":") { nextCharacter(tokenizer); } - const port = readUntilCharacter(tokenizer, "/", "?"); + var port = readUntilCharacter(tokenizer, "/", "?"); tokenizer._currentToken = URLToken.port(port); if (!hasCurrentCharacter(tokenizer)) { tokenizer._currentState = "DONE"; @@ -11307,7 +12247,7 @@ function nextPort(tokenizer) { } } function nextPath(tokenizer) { - const path = readUntilCharacter(tokenizer, "?"); + var path = readUntilCharacter(tokenizer, "?"); tokenizer._currentToken = URLToken.path(path); if (!hasCurrentCharacter(tokenizer)) { tokenizer._currentState = "DONE"; @@ -11320,14 +12260,14 @@ function nextQuery(tokenizer) { if (getCurrentCharacter(tokenizer) === "?") { nextCharacter(tokenizer); } - const query = readRemaining(tokenizer); + var query = readRemaining(tokenizer); tokenizer._currentToken = URLToken.query(query); tokenizer._currentState = "DONE"; } // Copyright (c) Microsoft Corporation. -const RedactedString = "REDACTED"; -const defaultAllowedHeaderNames = [ +var RedactedString = "REDACTED"; +var defaultAllowedHeaderNames = [ "x-ms-client-request-id", "x-ms-return-client-request-id", "x-ms-useragent", @@ -11367,33 +12307,35 @@ const defaultAllowedHeaderNames = [ "Transfer-Encoding", "User-Agent" ]; -const defaultAllowedQueryParameters = ["api-version"]; -class Sanitizer { - constructor({ allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { +var defaultAllowedQueryParameters = ["api-version"]; +var Sanitizer = /** @class */ (function () { + function Sanitizer(_a) { + var _b = _a === void 0 ? {} : _a, _c = _b.allowedHeaderNames, allowedHeaderNames = _c === void 0 ? [] : _c, _d = _b.allowedQueryParameters, allowedQueryParameters = _d === void 0 ? [] : _d; allowedHeaderNames = Array.isArray(allowedHeaderNames) ? defaultAllowedHeaderNames.concat(allowedHeaderNames) : defaultAllowedHeaderNames; allowedQueryParameters = Array.isArray(allowedQueryParameters) ? 
defaultAllowedQueryParameters.concat(allowedQueryParameters) : defaultAllowedQueryParameters; - this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); - this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + this.allowedHeaderNames = new Set(allowedHeaderNames.map(function (n) { return n.toLowerCase(); })); + this.allowedQueryParameters = new Set(allowedQueryParameters.map(function (p) { return p.toLowerCase(); })); } - sanitize(obj) { - const seen = new Set(); - return JSON.stringify(obj, (key, value) => { + Sanitizer.prototype.sanitize = function (obj) { + var _this = this; + var seen = new Set(); + return JSON.stringify(obj, function (key, value) { // Ensure Errors include their interesting non-enumerable members if (value instanceof Error) { - return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); + return tslib.__assign(tslib.__assign({}, value), { name: value.name, message: value.message }); } if (key === "_headersMap") { - return this.sanitizeHeaders(value); + return _this.sanitizeHeaders(value); } else if (key === "url") { - return this.sanitizeUrl(value); + return _this.sanitizeUrl(value); } else if (key === "query") { - return this.sanitizeQuery(value); + return _this.sanitizeQuery(value); } else if (key === "body") { // Don't log the request body @@ -11416,19 +12358,20 @@ class Sanitizer { } return value; }, 2); - } - sanitizeHeaders(value) { - return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value); - } - sanitizeQuery(value) { - return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]); - } - sanitizeObject(value, allowedKeys, accessor) { + }; + Sanitizer.prototype.sanitizeHeaders = function (value) { + return this.sanitizeObject(value, this.allowedHeaderNames, function (v, k) { return v[k].value; }); + }; + Sanitizer.prototype.sanitizeQuery = function (value) { + return this.sanitizeObject(value, this.allowedQueryParameters, function (v, k) { return v[k]; }); + }; + Sanitizer.prototype.sanitizeObject = function (value, allowedKeys, accessor) { if (typeof value !== "object" || value === null) { return value; } - const sanitized = {}; - for (const k of Object.keys(value)) { + var sanitized = {}; + for (var _i = 0, _a = Object.keys(value); _i < _a.length; _i++) { + var k = _a[_i]; if (allowedKeys.has(k.toLowerCase())) { sanitized[k] = accessor(value, k); } @@ -11437,230 +12380,267 @@ class Sanitizer { } } return sanitized; - } - sanitizeUrl(value) { + }; + Sanitizer.prototype.sanitizeUrl = function (value) { if (typeof value !== "string" || value === null) { return value; } - const urlBuilder = URLBuilder.parse(value); - const queryString = urlBuilder.getQuery(); + var urlBuilder = URLBuilder.parse(value); + var queryString = urlBuilder.getQuery(); if (!queryString) { return value; } - const query = URLQuery.parse(queryString); - for (const k of query.keys()) { + var query = URLQuery.parse(queryString); + for (var _i = 0, _a = query.keys(); _i < _a.length; _i++) { + var k = _a[_i]; if (!this.allowedQueryParameters.has(k.toLowerCase())) { query.set(k, RedactedString); } } urlBuilder.setQuery(query.toString()); return urlBuilder.toString(); - } -} + }; + return Sanitizer; +}()); // Copyright (c) Microsoft Corporation. 
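For context, the Sanitizer hunk above keeps an allow-list of header and query-parameter names and replaces everything else with "REDACTED" before anything is logged. The following is a minimal standalone sketch of that redaction idea, not code from this bundle: it uses the WHATWG URL API instead of the bundled URLBuilder/URLQuery, and the helper name and defaults are illustrative only.

```ts
// Minimal standalone sketch (not part of this bundle): redact every query
// parameter that is not on an allow-list, mirroring the intent of
// Sanitizer.sanitizeUrl above, but using the WHATWG URL API.
const REDACTED = "REDACTED";

function sanitizeUrl(value: string, allowedQueryParameters: string[] = ["api-version"]): string {
  const allowed = new Set(allowedQueryParameters.map((p) => p.toLowerCase()));
  let url: URL;
  try {
    url = new URL(value);
  } catch {
    return value; // not an absolute URL; leave it untouched
  }
  // Collect the keys first so mutating searchParams does not affect iteration.
  for (const key of Array.from(new Set(url.searchParams.keys()))) {
    if (!allowed.has(key.toLowerCase())) {
      url.searchParams.set(key, REDACTED);
    }
  }
  return url.toString();
}

// Example: sanitizeUrl("https://example.com/blob?api-version=1&sig=secret")
//   -> "https://example.com/blob?api-version=1&sig=REDACTED"
```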
-const errorSanitizer = new Sanitizer(); -class RestError extends Error { - constructor(message, code, statusCode, request, response) { - super(message); - this.name = "RestError"; - this.code = code; - this.statusCode = statusCode; - this.request = request; - this.response = response; - Object.setPrototypeOf(this, RestError.prototype); +var errorSanitizer = new Sanitizer(); +var RestError = /** @class */ (function (_super) { + tslib.__extends(RestError, _super); + function RestError(message, code, statusCode, request, response) { + var _this = _super.call(this, message) || this; + _this.name = "RestError"; + _this.code = code; + _this.statusCode = statusCode; + _this.request = request; + _this.response = response; + Object.setPrototypeOf(_this, RestError.prototype); + return _this; } /** * Logging method for util.inspect in Node */ - [custom]() { - return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; - } -} -RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; -RestError.PARSE_ERROR = "PARSE_ERROR"; + RestError.prototype[custom] = function () { + return "RestError: " + this.message + " \n " + errorSanitizer.sanitize(this); + }; + RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; + RestError.PARSE_ERROR = "PARSE_ERROR"; + return RestError; +}(Error)); // Copyright (c) Microsoft Corporation. -const logger = logger$1.createClientLogger("core-http"); +var logger = logger$1.createClientLogger("core-http"); // Copyright (c) Microsoft Corporation. -class ReportTransform extends stream.Transform { - constructor(progressCallback) { - super(); - this.progressCallback = progressCallback; - this.loadedBytes = 0; +var ReportTransform = /** @class */ (function (_super) { + tslib.__extends(ReportTransform, _super); + function ReportTransform(progressCallback) { + var _this = _super.call(this) || this; + _this.progressCallback = progressCallback; + _this.loadedBytes = 0; + return _this; } - _transform(chunk, _encoding, callback) { + ReportTransform.prototype._transform = function (chunk, _encoding, callback) { this.push(chunk); this.loadedBytes += chunk.length; this.progressCallback({ loadedBytes: this.loadedBytes }); callback(undefined); + }; + return ReportTransform; +}(stream.Transform)); +var FetchHttpClient = /** @class */ (function () { + function FetchHttpClient() { } -} -class FetchHttpClient { - async sendRequest(httpRequest) { + FetchHttpClient.prototype.sendRequest = function (httpRequest) { var _a; - if (!httpRequest && typeof httpRequest !== "object") { - throw new Error("'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object."); - } - const abortController$1 = new abortController.AbortController(); - let abortListener; - if (httpRequest.abortSignal) { - if (httpRequest.abortSignal.aborted) { - throw new abortController.AbortError("The operation was aborted."); - } - abortListener = (event) => { - if (event.type === "abort") { - abortController$1.abort(); - } - }; - httpRequest.abortSignal.addEventListener("abort", abortListener); - } - if (httpRequest.timeout) { - setTimeout(() => { - abortController$1.abort(); - }, httpRequest.timeout); - } - if (httpRequest.formData) { - const formData = httpRequest.formData; - const requestForm = new FormData(); - const appendFormValue = (key, value) => { - // value function probably returns a stream so we can provide a fresh stream on each retry - if (typeof value === "function") { - value = value(); - } - if (value && - Object.prototype.hasOwnProperty.call(value, "value") && - 
Object.prototype.hasOwnProperty.call(value, "options")) { - requestForm.append(key, value.value, value.options); - } - else { - requestForm.append(key, value); - } - }; - for (const formKey of Object.keys(formData)) { - const formValue = formData[formKey]; - if (Array.isArray(formValue)) { - for (let j = 0; j < formValue.length; j++) { - appendFormValue(formKey, formValue[j]); - } - } - else { - appendFormValue(formKey, formValue); - } - } - httpRequest.body = requestForm; - httpRequest.formData = undefined; - const contentType = httpRequest.headers.get("Content-Type"); - if (contentType && contentType.indexOf("multipart/form-data") !== -1) { - if (typeof requestForm.getBoundary === "function") { - httpRequest.headers.set("Content-Type", `multipart/form-data; boundary=${requestForm.getBoundary()}`); - } - else { - // browser will automatically apply a suitable content-type header - httpRequest.headers.remove("Content-Type"); - } - } - } - let body = httpRequest.body - ? typeof httpRequest.body === "function" - ? httpRequest.body() - : httpRequest.body - : undefined; - if (httpRequest.onUploadProgress && httpRequest.body) { - const onUploadProgress = httpRequest.onUploadProgress; - const uploadReportStream = new ReportTransform(onUploadProgress); - if (isReadableStream(body)) { - body.pipe(uploadReportStream); - } - else { - uploadReportStream.end(body); - } - body = uploadReportStream; - } - const platformSpecificRequestInit = await this.prepareRequest(httpRequest); - const requestInit = Object.assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController$1.signal, redirect: "manual" }, platformSpecificRequestInit); - let operationResponse; - try { - const response = await this.fetch(httpRequest.url, requestInit); - const headers = parseHeaders(response.headers); - const streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) || - httpRequest.streamResponseBody; - operationResponse = { - headers: headers, - request: httpRequest, - status: response.status, - readableStreamBody: streaming - ? response.body - : undefined, - bodyAsText: !streaming ? 
await response.text() : undefined - }; - const onDownloadProgress = httpRequest.onDownloadProgress; - if (onDownloadProgress) { - const responseBody = response.body || undefined; - if (isReadableStream(responseBody)) { - const downloadReportStream = new ReportTransform(onDownloadProgress); - responseBody.pipe(downloadReportStream); - operationResponse.readableStreamBody = downloadReportStream; - } - else { - const length = parseInt(headers.get("Content-Length")) || undefined; - if (length) { - // Calling callback for non-stream response for consistency with browser - onDownloadProgress({ loadedBytes: length }); - } + return tslib.__awaiter(this, void 0, void 0, function () { + var abortController$1, abortListener, formData, requestForm_1, appendFormValue, _i, _b, formKey, formValue, j, contentType, body, onUploadProgress, uploadReportStream, platformSpecificRequestInit, requestInit, operationResponse, response, headers, streaming, _c, onDownloadProgress, responseBody, downloadReportStream, length_1, error_1, fetchError, uploadStreamDone, downloadStreamDone; + var _d; + return tslib.__generator(this, function (_e) { + switch (_e.label) { + case 0: + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error("'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object."); + } + abortController$1 = new abortController.AbortController(); + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new abortController.AbortError("The operation was aborted."); + } + abortListener = function (event) { + if (event.type === "abort") { + abortController$1.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + if (httpRequest.timeout) { + setTimeout(function () { + abortController$1.abort(); + }, httpRequest.timeout); + } + if (httpRequest.formData) { + formData = httpRequest.formData; + requestForm_1 = new FormData(); + appendFormValue = function (key, value) { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && + Object.prototype.hasOwnProperty.call(value, "value") && + Object.prototype.hasOwnProperty.call(value, "options")) { + requestForm_1.append(key, value.value, value.options); + } + else { + requestForm_1.append(key, value); + } + }; + for (_i = 0, _b = Object.keys(formData); _i < _b.length; _i++) { + formKey = _b[_i]; + formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + httpRequest.body = requestForm_1; + httpRequest.formData = undefined; + contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm_1.getBoundary === "function") { + httpRequest.headers.set("Content-Type", "multipart/form-data; boundary=" + requestForm_1.getBoundary()); + } + else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + body = httpRequest.body + ? typeof httpRequest.body === "function" + ? 
httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + onUploadProgress = httpRequest.onUploadProgress; + uploadReportStream = new ReportTransform(onUploadProgress); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + return [4 /*yield*/, this.prepareRequest(httpRequest)]; + case 1: + platformSpecificRequestInit = _e.sent(); + requestInit = tslib.__assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController$1.signal, redirect: "manual" }, platformSpecificRequestInit); + _e.label = 2; + case 2: + _e.trys.push([2, 8, 9, 10]); + return [4 /*yield*/, this.fetch(httpRequest.url, requestInit)]; + case 3: + response = _e.sent(); + headers = parseHeaders(response.headers); + streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) || + httpRequest.streamResponseBody; + _d = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: streaming + ? response.body + : undefined + }; + if (!!streaming) return [3 /*break*/, 5]; + return [4 /*yield*/, response.text()]; + case 4: + _c = _e.sent(); + return [3 /*break*/, 6]; + case 5: + _c = undefined; + _e.label = 6; + case 6: + operationResponse = (_d.bodyAsText = _c, + _d); + onDownloadProgress = httpRequest.onDownloadProgress; + if (onDownloadProgress) { + responseBody = response.body || undefined; + if (isReadableStream(responseBody)) { + downloadReportStream = new ReportTransform(onDownloadProgress); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } + else { + length_1 = parseInt(headers.get("Content-Length")) || undefined; + if (length_1) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress({ loadedBytes: length_1 }); + } + } + } + return [4 /*yield*/, this.processRequest(operationResponse)]; + case 7: + _e.sent(); + return [2 /*return*/, operationResponse]; + case 8: + error_1 = _e.sent(); + fetchError = error_1; + if (fetchError.code === "ENOTFOUND") { + throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + } + else if (fetchError.type === "aborted") { + throw new abortController.AbortError("The operation was aborted."); + } + throw fetchError; + case 9: + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(function () { + var _a; + (_a = httpRequest.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener("abort", abortListener); + return; + }) + .catch(function (e) { + logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + return [7 /*endfinally*/]; + case 10: return [2 /*return*/]; } - } - await this.processRequest(operationResponse); - return operationResponse; - } - catch (error) { - const fetchError = error; - if (fetchError.code === "ENOTFOUND") { - throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); - } - else if (fetchError.type === "aborted") { - throw new abortController.AbortError("The operation was aborted."); - } - throw fetchError; - } - finally { - // clean up event listener - if (httpRequest.abortSignal && abortListener) { - let uploadStreamDone = Promise.resolve(); - if (isReadableStream(body)) { - uploadStreamDone = isStreamComplete(body); - } - let downloadStreamDone = Promise.resolve(); - if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { - downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody); - } - Promise.all([uploadStreamDone, downloadStreamDone]) - .then(() => { - var _a; - (_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); - return; - }) - .catch((e) => { - logger.warning("Error when cleaning up abortListener on httpRequest", e); - }); - } - } - } -} + }); + }); + }; + return FetchHttpClient; +}()); function isReadableStream(body) { return body && typeof body.pipe === "function"; } function isStreamComplete(stream) { - return new Promise((resolve) => { + return new Promise(function (resolve) { stream.on("close", resolve); stream.on("end", resolve); stream.on("error", resolve); }); } function parseHeaders(headers) { - const httpHeaders = new HttpHeaders(); - headers.forEach((value, key) => { + var httpHeaders = new HttpHeaders(); + headers.forEach(function (value, key) { httpHeaders.set(key, value); }); return httpHeaders; @@ -11668,14 +12648,14 @@ function parseHeaders(headers) { // Copyright (c) Microsoft Corporation. 
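Most of the churn in this hunk appears to come from the bundle being regenerated against an ES5-targeted build of the dependency: `class`, `const`, template literals and `async`/`await` are replaced by IIFE-wrapped classes, `var`, string concatenation and `tslib.__awaiter`/`tslib.__generator` state machines. As a rough, hedged illustration of that last transformation (the names and emitted temporaries below are mine, not taken from this bundle), a trivial async method downlevels approximately like this:

```ts
import * as tslib from "tslib";

// ES2017-style source, before downleveling:
class TextClient {
  async fetchText(url: string): Promise<string> {
    const response = await fetch(url);
    return response.text();
  }
}

// Roughly what the compiler emits for the same method when targeting ES5 with
// importHelpers enabled (labels and temporaries vary; shown for shape only):
var TextClientEs5 = /** @class */ (function () {
  function TextClientEs5() {}
  TextClientEs5.prototype.fetchText = function (url: string) {
    return tslib.__awaiter(this, void 0, void 0, function () {
      var response: any;
      return tslib.__generator(this, function (_a: any) {
        switch (_a.label) {
          case 0:
            return [4 /*yield*/, fetch(url)];
          case 1:
            response = _a.sent();
            return [2 /*return*/, response.text()];
        }
      });
    });
  };
  return TextClientEs5;
}());
```

Each `await` becomes a numbered state: the method returns `[4, promise]` to yield, resumes in the next `case` via `_a.sent()`, and finishes with a `[2, value]` return, which is exactly the pattern visible in the FetchHttpClient.sendRequest hunk above.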
function createProxyAgent(requestUrl, proxySettings, headers) { - const host = URLBuilder.parse(proxySettings.host).getHost(); + var host = URLBuilder.parse(proxySettings.host).getHost(); if (!host) { throw new Error("Expecting a non-empty host in proxy settings."); } if (!isValidPort(proxySettings.port)) { throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); } - const tunnelOptions = { + var tunnelOptions = { proxy: { host: host, port: proxySettings.port, @@ -11683,21 +12663,18 @@ function createProxyAgent(requestUrl, proxySettings, headers) { } }; if (proxySettings.username && proxySettings.password) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; - } - else if (proxySettings.username) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; + tunnelOptions.proxy.proxyAuth = proxySettings.username + ":" + proxySettings.password; } - const isRequestHttps = isUrlHttps(requestUrl); - const isProxyHttps = isUrlHttps(proxySettings.host); - const proxyAgent = { + var isRequestHttps = isUrlHttps(requestUrl); + var isProxyHttps = isUrlHttps(proxySettings.host); + var proxyAgent = { isHttps: isRequestHttps, agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) }; return proxyAgent; } function isUrlHttps(url) { - const urlScheme = URLBuilder.parse(url).getScheme() || ""; + var urlScheme = URLBuilder.parse(url).getScheme() || ""; return urlScheme.toLowerCase() === "https"; } function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { @@ -11724,24 +12701,26 @@ function isValidPort(port) { function getCachedAgent(isHttps, agentCache) { return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; } -class NodeFetchHttpClient extends FetchHttpClient { - constructor() { - super(...arguments); - this.proxyAgents = {}; - this.keepAliveAgents = {}; - this.cookieJar = new tough.CookieJar(undefined, { looseMode: true }); +var NodeFetchHttpClient = /** @class */ (function (_super) { + tslib.__extends(NodeFetchHttpClient, _super); + function NodeFetchHttpClient() { + var _this = _super !== null && _super.apply(this, arguments) || this; + _this.proxyAgents = {}; + _this.keepAliveAgents = {}; + _this.cookieJar = new tough.CookieJar(undefined, { looseMode: true }); + return _this; } - getOrCreateAgent(httpRequest) { - const isHttps = isUrlHttps(httpRequest.url); + NodeFetchHttpClient.prototype.getOrCreateAgent = function (httpRequest) { + var isHttps = isUrlHttps(httpRequest.url); // At the moment, proxy settings and keepAlive are mutually // exclusive because the 'tunnel' library currently lacks the // ability to create a proxy with keepAlive turned on. 
if (httpRequest.proxySettings) { - let agent = getCachedAgent(isHttps, this.proxyAgents); + var agent = getCachedAgent(isHttps, this.proxyAgents); if (agent) { return agent; } - const tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); + var tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); agent = tunnel.agent; if (tunnel.isHttps) { this.proxyAgents.httpsAgent = tunnel.agent; @@ -11752,11 +12731,11 @@ class NodeFetchHttpClient extends FetchHttpClient { return agent; } else if (httpRequest.keepAlive) { - let agent = getCachedAgent(isHttps, this.keepAliveAgents); + var agent = getCachedAgent(isHttps, this.keepAliveAgents); if (agent) { return agent; } - const agentOptions = { + var agentOptions = { keepAlive: httpRequest.keepAlive }; if (isHttps) { @@ -11770,49 +12749,77 @@ class NodeFetchHttpClient extends FetchHttpClient { else { return isHttps ? https.globalAgent : http.globalAgent; } - } + }; // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs - async fetch(input, init) { - return node_fetch(input, init); - } - async prepareRequest(httpRequest) { - const requestInit = {}; - if (this.cookieJar && !httpRequest.headers.get("Cookie")) { - const cookieString = await new Promise((resolve, reject) => { - this.cookieJar.getCookieString(httpRequest.url, (err, cookie) => { - if (err) { - reject(err); - } - else { - resolve(cookie); - } - }); + NodeFetchHttpClient.prototype.fetch = function (input, init) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, node_fetch(input, init)]; }); - httpRequest.headers.set("Cookie", cookieString); - } - // Set the http(s) agent - requestInit.agent = this.getOrCreateAgent(httpRequest); - requestInit.compress = httpRequest.decompressResponse; - return requestInit; - } - async processRequest(operationResponse) { - if (this.cookieJar) { - const setCookieHeader = operationResponse.headers.get("Set-Cookie"); - if (setCookieHeader !== undefined) { - await new Promise((resolve, reject) => { - this.cookieJar.setCookie(setCookieHeader, operationResponse.request.url, { ignoreError: true }, (err) => { - if (err) { - reject(err); - } - else { - resolve(); - } - }); - }); - } - } - } -} + }); + }; + NodeFetchHttpClient.prototype.prepareRequest = function (httpRequest) { + return tslib.__awaiter(this, void 0, void 0, function () { + var requestInit, cookieString; + var _this = this; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + requestInit = {}; + if (!(this.cookieJar && !httpRequest.headers.get("Cookie"))) return [3 /*break*/, 2]; + return [4 /*yield*/, new Promise(function (resolve, reject) { + _this.cookieJar.getCookieString(httpRequest.url, function (err, cookie) { + if (err) { + reject(err); + } + else { + resolve(cookie); + } + }); + })]; + case 1: + cookieString = _a.sent(); + httpRequest.headers.set("Cookie", cookieString); + _a.label = 2; + case 2: + // Set the http(s) agent + requestInit.agent = this.getOrCreateAgent(httpRequest); + requestInit.compress = httpRequest.decompressResponse; + return [2 /*return*/, requestInit]; + } + }); + }); + }; + NodeFetchHttpClient.prototype.processRequest = function (operationResponse) { + return tslib.__awaiter(this, void 0, void 0, function () { + var setCookieHeader_1; + var _this = this; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!this.cookieJar) return [3 
/*break*/, 2]; + setCookieHeader_1 = operationResponse.headers.get("Set-Cookie"); + if (!(setCookieHeader_1 !== undefined)) return [3 /*break*/, 2]; + return [4 /*yield*/, new Promise(function (resolve, reject) { + _this.cookieJar.setCookie(setCookieHeader_1, operationResponse.request.url, { ignoreError: true }, function (err) { + if (err) { + reject(err); + } + else { + resolve(); + } + }); + })]; + case 1: + _a.sent(); + _a.label = 2; + case 2: return [2 /*return*/]; + } + }); + }); + }; + return NodeFetchHttpClient; +}(FetchHttpClient)); // Copyright (c) Microsoft Corporation. (function (HttpPipelineLogLevel) { @@ -11840,10 +12847,10 @@ class NodeFetchHttpClient extends FetchHttpClient { * @param opts - OperationOptions object to convert to RequestOptionsBase */ function operationOptionsToRequestOptionsBase(opts) { - const { requestOptions, tracingOptions } = opts, additionalOptions = tslib.__rest(opts, ["requestOptions", "tracingOptions"]); - let result = additionalOptions; + var requestOptions = opts.requestOptions, tracingOptions = opts.tracingOptions, additionalOptions = tslib.__rest(opts, ["requestOptions", "tracingOptions"]); + var result = additionalOptions; if (requestOptions) { - result = Object.assign(Object.assign({}, result), requestOptions); + result = tslib.__assign(tslib.__assign({}, result), requestOptions); } if (tracingOptions) { result.spanOptions = tracingOptions.spanOptions; @@ -11853,8 +12860,8 @@ function operationOptionsToRequestOptionsBase(opts) { } // Copyright (c) Microsoft Corporation. -class BaseRequestPolicy { - constructor(_nextPolicy, _options) { +var BaseRequestPolicy = /** @class */ (function () { + function BaseRequestPolicy(_nextPolicy, _options) { this._nextPolicy = _nextPolicy; this._options = _options; } @@ -11863,24 +12870,25 @@ class BaseRequestPolicy { * @param logLevel - The log level of the log that will be logged. * @returns Whether or not a log with the provided log level should be logged. */ - shouldLog(logLevel) { + BaseRequestPolicy.prototype.shouldLog = function (logLevel) { return this._options.shouldLog(logLevel); - } + }; /** * Attempt to log the provided message to the provided logger. If no logger was provided or if * the log level does not meat the logger's threshold, then nothing will be logged. * @param logLevel - The log level of this log. * @param message - The message of this log. */ - log(logLevel, message) { + BaseRequestPolicy.prototype.log = function (logLevel, message) { this._options.log(logLevel, message); - } -} + }; + return BaseRequestPolicy; +}()); /** * Optional properties that can be used when creating a RequestPolicy. */ -class RequestPolicyOptions { - constructor(_logger) { +var RequestPolicyOptions = /** @class */ (function () { + function RequestPolicyOptions(_logger) { this._logger = _logger; } /** @@ -11888,89 +12896,104 @@ class RequestPolicyOptions { * @param logLevel - The log level of the log that will be logged. * @returns Whether or not a log with the provided log level should be logged. */ - shouldLog(logLevel) { + RequestPolicyOptions.prototype.shouldLog = function (logLevel) { return (!!this._logger && logLevel !== exports.HttpPipelineLogLevel.OFF && logLevel <= this._logger.minimumLogLevel); - } + }; /** * Attempt to log the provided message to the provided logger. If no logger was provided or if * the log level does not meet the logger's threshold, then nothing will be logged. * @param logLevel - The log level of this log. * @param message - The message of this log. 
*/ - log(logLevel, message) { + RequestPolicyOptions.prototype.log = function (logLevel, message) { if (this._logger && this.shouldLog(logLevel)) { this._logger.log(logLevel, message); } - } -} + }; + return RequestPolicyOptions; +}()); // Copyright (c) Microsoft Corporation. -function logPolicy(loggingOptions = {}) { +function logPolicy(loggingOptions) { + if (loggingOptions === void 0) { loggingOptions = {}; } return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new LogPolicy(nextPolicy, options, loggingOptions); } }; } -class LogPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { - super(nextPolicy, options); - this.logger = logger$1; - this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - get allowedHeaderNames() { - return this.sanitizer.allowedHeaderNames; - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - set allowedHeaderNames(allowedHeaderNames) { - this.sanitizer.allowedHeaderNames = allowedHeaderNames; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. - */ - get allowedQueryParameters() { - return this.sanitizer.allowedQueryParameters; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. - */ - set allowedQueryParameters(allowedQueryParameters) { - this.sanitizer.allowedQueryParameters = allowedQueryParameters; +var LogPolicy = /** @class */ (function (_super) { + tslib.__extends(LogPolicy, _super); + function LogPolicy(nextPolicy, options, _a) { + var _b = _a === void 0 ? {} : _a, _c = _b.logger, logger$1 = _c === void 0 ? logger.info : _c, _d = _b.allowedHeaderNames, allowedHeaderNames = _d === void 0 ? [] : _d, _e = _b.allowedQueryParameters, allowedQueryParameters = _e === void 0 ? [] : _e; + var _this = _super.call(this, nextPolicy, options) || this; + _this.logger = logger$1; + _this.sanitizer = new Sanitizer({ allowedHeaderNames: allowedHeaderNames, allowedQueryParameters: allowedQueryParameters }); + return _this; } - sendRequest(request) { + Object.defineProperty(LogPolicy.prototype, "allowedHeaderNames", { + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. 
+ */ + get: function () { + return this.sanitizer.allowedHeaderNames; + }, + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + set: function (allowedHeaderNames) { + this.sanitizer.allowedHeaderNames = allowedHeaderNames; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(LogPolicy.prototype, "allowedQueryParameters", { + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get: function () { + return this.sanitizer.allowedQueryParameters; + }, + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + set: function (allowedQueryParameters) { + this.sanitizer.allowedQueryParameters = allowedQueryParameters; + }, + enumerable: false, + configurable: true + }); + LogPolicy.prototype.sendRequest = function (request) { + var _this = this; if (!this.logger.enabled) return this._nextPolicy.sendRequest(request); this.logRequest(request); - return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); - } - logRequest(request) { - this.logger(`Request: ${this.sanitizer.sanitize(request)}`); - } - logResponse(response) { - this.logger(`Response status code: ${response.status}`); - this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); + return this._nextPolicy.sendRequest(request).then(function (response) { return _this.logResponse(response); }); + }; + LogPolicy.prototype.logRequest = function (request) { + this.logger("Request: " + this.sanitizer.sanitize(request)); + }; + LogPolicy.prototype.logResponse = function (response) { + this.logger("Response status code: " + response.status); + this.logger("Headers: " + this.sanitizer.sanitize(response.headers)); return response; - } -} + }; + return LogPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -11983,7 +13006,7 @@ function getPathStringFromParameter(parameter) { return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); } function getPathStringFromParameterPath(parameterPath, mapper) { - let result; + var result; if (typeof parameterPath === "string") { result = parameterPath; } @@ -12002,9 +13025,9 @@ function getPathStringFromParameterPath(parameterPath, mapper) { * @internal */ function getStreamResponseStatusCodes(operationSpec) { - const result = new Set(); - for (const statusCode in operationSpec.responses) { - const operationResponse = operationSpec.responses[statusCode]; + var result = new Set(); + for (var statusCode in operationSpec.responses) { + var operationResponse = operationSpec.responses[statusCode]; if (operationResponse.bodyMapper && operationResponse.bodyMapper.type.name === MapperType.Stream) { result.add(Number(statusCode)); @@ -12018,7 +13041,7 @@ function getStreamResponseStatusCodes(operationSpec) { // by the xm2js library is mutable. 
See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 // By creating a new copy of the settings each time we instantiate the parser, // we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally. -const xml2jsDefaultOptionsV2 = { +var xml2jsDefaultOptionsV2 = { explicitCharkey: false, trim: false, normalize: false, @@ -12059,10 +13082,10 @@ const xml2jsDefaultOptionsV2 = { cdata: false }; // The xml2js settings for general XML parsing operations. -const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); +var xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); xml2jsParserSettings.explicitArray = false; // The xml2js settings for general XML building operations. -const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); +var xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); xml2jsBuilderSettings.explicitArray = false; xml2jsBuilderSettings.renderOpts = { pretty: false @@ -12072,11 +13095,12 @@ xml2jsBuilderSettings.renderOpts = { * @param obj - JSON object to be converted into XML string * @param opts - Options that govern the parsing of given JSON object */ -function stringifyXML(obj, opts = {}) { +function stringifyXML(obj, opts) { var _a; + if (opts === void 0) { opts = {}; } xml2jsBuilderSettings.rootName = opts.rootName; xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - const builder = new xml2js.Builder(xml2jsBuilderSettings); + var builder = new xml2js.Builder(xml2jsBuilderSettings); return builder.buildObject(obj); } /** @@ -12084,17 +13108,18 @@ function stringifyXML(obj, opts = {}) { * @param str - String containing the XML content to be parsed into JSON * @param opts - Options that govern the parsing of given xml string */ -function parseXML(str, opts = {}) { +function parseXML(str, opts) { var _a; + if (opts === void 0) { opts = {}; } xml2jsParserSettings.explicitRoot = !!opts.includeRoot; xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - const xmlParser = new xml2js.Parser(xml2jsParserSettings); - return new Promise((resolve, reject) => { + var xmlParser = new xml2js.Parser(xml2jsParserSettings); + return new Promise(function (resolve, reject) { if (!str) { reject(new Error("Document is empty")); } else { - xmlParser.parseString(str, (err, res) => { + xmlParser.parseString(str, function (err, res) { if (err) { reject(err); } @@ -12113,14 +13138,14 @@ function parseXML(str, opts = {}) { */ function deserializationPolicy(deserializationContentTypes, parsingOptions) { return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions); } }; } -const defaultJsonContentTypes = ["application/json", "text/json"]; -const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; -const DefaultDeserializationOptions = { +var defaultJsonContentTypes = ["application/json", "text/json"]; +var defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +var DefaultDeserializationOptions = { expectedContentTypes: { json: defaultJsonContentTypes, xml: defaultXmlContentTypes @@ -12130,28 +13155,39 @@ const DefaultDeserializationOptions = { * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the * HTTP pipeline. 
*/ -class DeserializationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions = {}) { +var DeserializationPolicy = /** @class */ (function (_super) { + tslib.__extends(DeserializationPolicy, _super); + function DeserializationPolicy(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions) { + if (parsingOptions === void 0) { parsingOptions = {}; } var _a; - super(nextPolicy, requestPolicyOptions); - this.jsonContentTypes = + var _this = _super.call(this, nextPolicy, requestPolicyOptions) || this; + _this.jsonContentTypes = (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; - this.xmlContentTypes = + _this.xmlContentTypes = (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; - this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - } - async sendRequest(request) { - return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { - xmlCharKey: this.xmlCharKey - })); + _this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; + return _this; } -} + DeserializationPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy.sendRequest(request).then(function (response) { + return deserializeResponseBody(_this.jsonContentTypes, _this.xmlContentTypes, response, { + xmlCharKey: _this.xmlCharKey + }); + })]; + }); + }); + }; + return DeserializationPolicy; +}(BaseRequestPolicy)); function getOperationResponse(parsedResponse) { - let result; - const request = parsedResponse.request; - const operationSpec = request.operationSpec; + var result; + var request = parsedResponse.request; + var operationSpec = request.operationSpec; if (operationSpec) { - const operationResponseGetter = request.operationResponseGetter; + var operationResponseGetter = request.operationResponseGetter; if (!operationResponseGetter) { result = operationSpec.responses[parsedResponse.status]; } @@ -12162,8 +13198,8 @@ function getOperationResponse(parsedResponse) { return result; } function shouldDeserializeResponse(parsedResponse) { - const shouldDeserialize = parsedResponse.request.shouldDeserialize; - let result; + var shouldDeserialize = parsedResponse.request.shouldDeserialize; + var result; if (shouldDeserialize === undefined) { result = true; } @@ -12175,23 +13211,24 @@ function shouldDeserializeResponse(parsedResponse) { } return result; } -function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { +function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options) { var _a, _b, _c; - const updatedOptions = { + if (options === void 0) { options = {}; } + var updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? 
_c : XML_CHARKEY }; - return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { + return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then(function (parsedResponse) { if (!shouldDeserializeResponse(parsedResponse)) { return parsedResponse; } - const operationSpec = parsedResponse.request.operationSpec; + var operationSpec = parsedResponse.request.operationSpec; if (!operationSpec || !operationSpec.responses) { return parsedResponse; } - const responseSpec = getOperationResponse(parsedResponse); - const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec); + var responseSpec = getOperationResponse(parsedResponse); + var _a = handleErrorResponse(parsedResponse, operationSpec, responseSpec), error = _a.error, shouldReturnResponse = _a.shouldReturnResponse; if (error) { throw error; } @@ -12202,7 +13239,7 @@ function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, op // use it to deserialize the response. if (responseSpec) { if (responseSpec.bodyMapper) { - let valueToDeserialize = parsedResponse.parsedBody; + var valueToDeserialize = parsedResponse.parsedBody; if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { valueToDeserialize = typeof valueToDeserialize === "object" @@ -12213,7 +13250,7 @@ function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, op parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); } catch (innerError) { - const restError = new RestError(`Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); + var restError = new RestError("Error " + innerError + " occurred in deserializing the responseBody - " + parsedResponse.bodyAsText, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); throw restError; } } @@ -12229,14 +13266,14 @@ function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, op }); } function isOperationSpecEmpty(operationSpec) { - const expectedStatusCodes = Object.keys(operationSpec.responses); + var expectedStatusCodes = Object.keys(operationSpec.responses); return (expectedStatusCodes.length === 0 || (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); } function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { var _a; - const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; - const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) + var isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; + var isExpectedStatusCode = isOperationSpecEmpty(operationSpec) ? isSuccessByStatus : !!responseSpec; if (isExpectedStatusCode) { @@ -12249,35 +13286,35 @@ function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { return { error: null, shouldReturnResponse: false }; } } - const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; - const streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) || + var errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? 
responseSpec : operationSpec.responses.default; + var streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) || parsedResponse.request.streamResponseBody; - const initialErrorMessage = streaming - ? `Unexpected status code: ${parsedResponse.status}` + var initialErrorMessage = streaming + ? "Unexpected status code: " + parsedResponse.status : parsedResponse.bodyAsText; - const error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); + var error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); // If the item failed but there's no error spec or default spec to deserialize the error, // we should fail so we just throw the parsed response if (!errorResponseSpec) { throw error; } - const defaultBodyMapper = errorResponseSpec.bodyMapper; - const defaultHeadersMapper = errorResponseSpec.headersMapper; + var defaultBodyMapper = errorResponseSpec.bodyMapper; + var defaultHeadersMapper = errorResponseSpec.headersMapper; try { // If error response has a body, try to deserialize it using default body mapper. // Then try to extract error code & message from it if (parsedResponse.parsedBody) { - const parsedBody = parsedResponse.parsedBody; - let parsedError; + var parsedBody = parsedResponse.parsedBody; + var parsedError = void 0; if (defaultBodyMapper) { - let valueToDeserialize = parsedBody; + var valueToDeserialize = parsedBody; if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) { valueToDeserialize = typeof parsedBody === "object" ? parsedBody[defaultBodyMapper.xmlElementName] : []; } parsedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody"); } - const internalError = parsedBody.error || parsedError || parsedBody; + var internalError = parsedBody.error || parsedError || parsedBody; error.code = internalError.code; if (internalError.message) { error.message = internalError.message; @@ -12292,36 +13329,36 @@ function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { } } catch (defaultError) { - error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; + error.message = "Error \"" + defaultError.message + "\" occurred in deserializing the responseBody - \"" + parsedResponse.bodyAsText + "\" for the default response."; } - return { error, shouldReturnResponse: false }; + return { error: error, shouldReturnResponse: false }; } function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { var _a; - const errorHandler = (err) => { - const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; - const errCode = err.code || RestError.PARSE_ERROR; - const e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse); + var errorHandler = function (err) { + var msg = "Error \"" + err + "\" occurred while parsing the response body - " + operationResponse.bodyAsText + "."; + var errCode = err.code || RestError.PARSE_ERROR; + var e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse); return Promise.reject(e); }; - const streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? 
void 0 : _a.has(operationResponse.status)) || + var streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) || operationResponse.request.streamResponseBody; if (!streaming && operationResponse.bodyAsText) { - const text = operationResponse.bodyAsText; - const contentType = operationResponse.headers.get("Content-Type") || ""; - const contentComponents = !contentType + var text_1 = operationResponse.bodyAsText; + var contentType = operationResponse.headers.get("Content-Type") || ""; + var contentComponents = !contentType ? [] - : contentType.split(";").map((component) => component.toLowerCase()); + : contentType.split(";").map(function (component) { return component.toLowerCase(); }); if (contentComponents.length === 0 || - contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { - return new Promise((resolve) => { - operationResponse.parsedBody = JSON.parse(text); + contentComponents.some(function (component) { return jsonContentTypes.indexOf(component) !== -1; })) { + return new Promise(function (resolve) { + operationResponse.parsedBody = JSON.parse(text_1); resolve(operationResponse); }).catch(errorHandler); } - else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { - return parseXML(text, opts) - .then((body) => { + else if (contentComponents.some(function (component) { return xmlContentTypes.indexOf(component) !== -1; })) { + return parseXML(text_1, opts) + .then(function (body) { operationResponse.parsedBody = body; return operationResponse; }) @@ -12333,11 +13370,11 @@ function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -const DEFAULT_CLIENT_RETRY_COUNT = 3; +var DEFAULT_CLIENT_RETRY_COUNT = 3; // intervals are in ms -const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; -const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; -const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; +var DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +var DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; function isNumber(n) { return typeof n === "number"; } @@ -12364,7 +13401,8 @@ function shouldRetry(retryLimit, predicate, retryData, response, error) { * @param retryData - The retry data. * @param err - The operation"s error, if any. */ -function updateRetryData(retryOptions, retryData = { retryCount: 0, retryInterval: 0 }, err) { +function updateRetryData(retryOptions, retryData, err) { + if (retryData === void 0) { retryData = { retryCount: 0, retryInterval: 0 }; } if (err) { if (retryData.error) { err.innerError = retryData.error; @@ -12374,72 +13412,18 @@ function updateRetryData(retryOptions, retryData = { retryCount: 0, retryInterva // Adjust retry count retryData.retryCount++; // Adjust retry interval - let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1; - const boundedRandDelta = retryOptions.retryInterval * 0.8 + + var incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1; + var boundedRandDelta = retryOptions.retryInterval * 0.8 + Math.floor(Math.random() * (retryOptions.retryInterval * 0.4)); incrementDelta *= boundedRandDelta; retryData.retryInterval = Math.min(retryOptions.minRetryInterval + incrementDelta, retryOptions.maxRetryInterval); return retryData; } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-/** - * Helper TypeGuard that checks if the value is not null or undefined. - * @param thing - Anything - * @internal - */ -function isDefined(thing) { - return typeof thing !== "undefined" && thing !== null; -} - -// Copyright (c) Microsoft Corporation. -const StandardAbortMessage = "The operation was aborted."; -/** - * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. - * @param delayInMs - The number of milliseconds to be delayed. - * @param value - The value to be resolved with after a timeout of t milliseconds. - * @param options - The options for delay - currently abort options - * @param abortSignal - The abortSignal associated with containing operation. - * @param abortErrorMsg - The abort error message associated with containing operation. - * @returns - Resolved promise - */ -function delay(delayInMs, value, options) { - return new Promise((resolve, reject) => { - let timer = undefined; - let onAborted = undefined; - const rejectOnAbort = () => { - return reject(new abortController.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); - }; - const removeListeners = () => { - if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { - options.abortSignal.removeEventListener("abort", onAborted); - } - }; - onAborted = () => { - if (isDefined(timer)) { - clearTimeout(timer); - } - removeListeners(); - return rejectOnAbort(); - }; - if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { - return rejectOnAbort(); - } - timer = setTimeout(() => { - removeListeners(); - resolve(value); - }, delayInMs); - if (options === null || options === void 0 ? void 0 : options.abortSignal) { - options.abortSignal.addEventListener("abort", onAborted); - } - }); -} - // Copyright (c) Microsoft Corporation. function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); } }; @@ -12447,7 +13431,7 @@ function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { (function (RetryMode) { RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; })(exports.RetryMode || (exports.RetryMode = {})); -const DefaultRetryOptions = { +var DefaultRetryOptions = { maxRetries: DEFAULT_CLIENT_RETRY_COUNT, retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL @@ -12455,7 +13439,8 @@ const DefaultRetryOptions = { /** * Instantiates a new "ExponentialRetryPolicyFilter" instance. */ -class ExponentialRetryPolicy extends BaseRequestPolicy { +var ExponentialRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(ExponentialRetryPolicy, _super); /** * @param nextPolicy - The next RequestPolicy in the pipeline chain. * @param options - The options for this RequestPolicy. @@ -12464,188 +13449,217 @@ class ExponentialRetryPolicy extends BaseRequestPolicy { * @param minRetryInterval - The minimum retry interval, in milliseconds. * @param maxRetryInterval - The maximum retry interval, in milliseconds. */ - constructor(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) { - super(nextPolicy, options); - this.retryCount = isNumber(retryCount) ? 
retryCount : DEFAULT_CLIENT_RETRY_COUNT; - this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; - this.maxRetryInterval = isNumber(maxRetryInterval) + function ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + _this.maxRetryInterval = isNumber(maxRetryInterval) ? maxRetryInterval : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; } - sendRequest(request) { + ExponentialRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; return this._nextPolicy .sendRequest(request.clone()) - .then((response) => retry(this, request, response)) - .catch((error) => retry(this, request, error.response, undefined, error)); - } -} -async function retry(policy, request, response, retryData, requestError) { - function shouldPolicyRetry(responseParam) { - const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; - if (statusCode === 503 && (response === null || response === void 0 ? void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { - return false; - } - if (statusCode === undefined || - (statusCode < 500 && statusCode !== 408) || - statusCode === 501 || - statusCode === 505) { - return false; - } - return true; - } - retryData = updateRetryData({ - retryInterval: policy.retryInterval, - minRetryInterval: 0, - maxRetryInterval: policy.maxRetryInterval - }, retryData, requestError); - const isAborted = request.abortSignal && request.abortSignal.aborted; - if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { - logger.info(`Retrying request in ${retryData.retryInterval}`); - try { - await delay(retryData.retryInterval); - const res = await policy._nextPolicy.sendRequest(request.clone()); - return retry(policy, request, res, retryData); - } - catch (err) { - return retry(policy, request, response, retryData, err); + .then(function (response) { return retry(_this, request, response); }) + .catch(function (error) { return retry(_this, request, error.response, undefined, error); }); + }; + return ExponentialRetryPolicy; +}(BaseRequestPolicy)); +function retry(policy, request, response, retryData, requestError) { + return tslib.__awaiter(this, void 0, void 0, function () { + function shouldPolicyRetry(responseParam) { + var statusCode = responseParam === null || responseParam === void 0 ? 
void 0 : responseParam.status; + if (statusCode === undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + return true; } - } - else if (isAborted || requestError || !response) { - // If the operation failed in the end, return all errors instead of just the last one - const err = retryData.error || - new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); - throw err; - } - else { - return response; - } + var isAborted, res, err_1, err; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + retryData = updateRetryData({ + retryInterval: policy.retryInterval, + minRetryInterval: 0, + maxRetryInterval: policy.maxRetryInterval + }, retryData, requestError); + isAborted = request.abortSignal && request.abortSignal.aborted; + if (!(!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response))) return [3 /*break*/, 6]; + logger.info("Retrying request in " + retryData.retryInterval); + _a.label = 1; + case 1: + _a.trys.push([1, 4, , 5]); + return [4 /*yield*/, delay(retryData.retryInterval)]; + case 2: + _a.sent(); + return [4 /*yield*/, policy._nextPolicy.sendRequest(request.clone())]; + case 3: + res = _a.sent(); + return [2 /*return*/, retry(policy, request, res, retryData)]; + case 4: + err_1 = _a.sent(); + return [2 /*return*/, retry(policy, request, response, retryData, err_1)]; + case 5: return [3 /*break*/, 7]; + case 6: + if (isAborted || requestError || !response) { + err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + throw err; + } + else { + return [2 /*return*/, response]; + } + case 7: return [2 /*return*/]; + } + }); + }); } // Copyright (c) Microsoft Corporation. -function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { +function generateClientRequestIdPolicy(requestIdHeaderName) { + if (requestIdHeaderName === void 0) { requestIdHeaderName = "x-ms-client-request-id"; } return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); } }; } -class GenerateClientRequestIdPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _requestIdHeaderName) { - super(nextPolicy, options); - this._requestIdHeaderName = _requestIdHeaderName; +var GenerateClientRequestIdPolicy = /** @class */ (function (_super) { + tslib.__extends(GenerateClientRequestIdPolicy, _super); + function GenerateClientRequestIdPolicy(nextPolicy, options, _requestIdHeaderName) { + var _this = _super.call(this, nextPolicy, options) || this; + _this._requestIdHeaderName = _requestIdHeaderName; + return _this; } - sendRequest(request) { + GenerateClientRequestIdPolicy.prototype.sendRequest = function (request) { if (!request.headers.contains(this._requestIdHeaderName)) { request.headers.set(this._requestIdHeaderName, request.requestId); } return this._nextPolicy.sendRequest(request); - } -} + }; + return GenerateClientRequestIdPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. 
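
Aside for readers skimming the retry hunks above (standalone illustration, not part of the bundle diff): both the "-" and "+" sides keep the same backoff calculation inside updateRetryData, where the delay grows as 2^(retryCount - 1) - 1, is scaled by a jittered factor between 0.8x and 1.2x of the base retryInterval, and is then clamped to maxRetryInterval. A minimal TypeScript sketch of that calculation; BackoffOptions and nextRetryInterval are hypothetical names introduced only for this note.

interface BackoffOptions {
  retryInterval: number;    // base interval in ms (DEFAULT_CLIENT_RETRY_INTERVAL above is 30 000 ms)
  minRetryInterval: number; // floor in ms
  maxRetryInterval: number; // cap in ms (DEFAULT_CLIENT_MAX_RETRY_INTERVAL above is 90 000 ms)
}

function nextRetryInterval(retryCount: number, opts: BackoffOptions): number {
  // Exponential growth: 0, 1, 3, 7, ... for attempts 1, 2, 3, 4, ...
  let incrementDelta = Math.pow(2, retryCount - 1) - 1;
  // Jitter: a random value between 0.8x and 1.2x of the base interval.
  const boundedRandDelta =
    opts.retryInterval * 0.8 + Math.floor(Math.random() * (opts.retryInterval * 0.4));
  incrementDelta *= boundedRandDelta;
  // Clamp so a long retry chain can never exceed the configured maximum.
  return Math.min(opts.minRetryInterval + incrementDelta, opts.maxRetryInterval);
}

// With the bundle's default intervals the second retry waits roughly 27-39 s:
// nextRetryInterval(2, { retryInterval: 30000, minRetryInterval: 3000, maxRetryInterval: 90000 })
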
function getDefaultUserAgentKey() { return Constants.HeaderConstants.USER_AGENT; } function getPlatformSpecificData() { - const runtimeInfo = { + var runtimeInfo = { key: "Node", value: process.version }; - const osInfo = { + var osInfo = { key: "OS", - value: `(${os.arch()}-${os.type()}-${os.release()})` + value: "(" + os.arch() + "-" + os.type() + "-" + os.release() + ")" }; return [runtimeInfo, osInfo]; } // Copyright (c) Microsoft Corporation. function getRuntimeInfo() { - const msRestRuntime = { + var msRestRuntime = { key: "core-http", value: Constants.coreHttpVersion }; return [msRestRuntime]; } -function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { +function getUserAgentString(telemetryInfo, keySeparator, valueSeparator) { + if (keySeparator === void 0) { keySeparator = " "; } + if (valueSeparator === void 0) { valueSeparator = "/"; } return telemetryInfo - .map((info) => { - const value = info.value ? `${valueSeparator}${info.value}` : ""; - return `${info.key}${value}`; + .map(function (info) { + var value = info.value ? "" + valueSeparator + info.value : ""; + return "" + info.key + value; }) .join(keySeparator); } -const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +var getDefaultUserAgentHeaderName = getDefaultUserAgentKey; function getDefaultUserAgentValue() { - const runtimeInfo = getRuntimeInfo(); - const platformSpecificData = getPlatformSpecificData(); - const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + var runtimeInfo = getRuntimeInfo(); + var platformSpecificData = getPlatformSpecificData(); + var userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); return userAgent; } function userAgentPolicy(userAgentData) { - const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null + var key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null ? getDefaultUserAgentKey() : userAgentData.key; - const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null + var value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null ? 
getDefaultUserAgentValue() : userAgentData.value; return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new UserAgentPolicy(nextPolicy, options, key, value); } }; } -class UserAgentPolicy extends BaseRequestPolicy { - constructor(_nextPolicy, _options, headerKey, headerValue) { - super(_nextPolicy, _options); - this._nextPolicy = _nextPolicy; - this._options = _options; - this.headerKey = headerKey; - this.headerValue = headerValue; +var UserAgentPolicy = /** @class */ (function (_super) { + tslib.__extends(UserAgentPolicy, _super); + function UserAgentPolicy(_nextPolicy, _options, headerKey, headerValue) { + var _this = _super.call(this, _nextPolicy, _options) || this; + _this._nextPolicy = _nextPolicy; + _this._options = _options; + _this.headerKey = headerKey; + _this.headerValue = headerValue; + return _this; } - sendRequest(request) { + UserAgentPolicy.prototype.sendRequest = function (request) { this.addUserAgentHeader(request); return this._nextPolicy.sendRequest(request); - } - addUserAgentHeader(request) { + }; + UserAgentPolicy.prototype.addUserAgentHeader = function (request) { if (!request.headers) { request.headers = new HttpHeaders(); } if (!request.headers.get(this.headerKey) && this.headerValue) { request.headers.set(this.headerKey, this.headerValue); } - } -} + }; + return UserAgentPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. /** * Methods that are allowed to follow redirects 301 and 302 */ -const allowedRedirect = ["GET", "HEAD"]; -const DefaultRedirectOptions = { +var allowedRedirect = ["GET", "HEAD"]; +var DefaultRedirectOptions = { handleRedirects: true, maxRetries: 20 }; -function redirectPolicy(maximumRetries = 20) { +function redirectPolicy(maximumRetries) { + if (maximumRetries === void 0) { maximumRetries = 20; } return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new RedirectPolicy(nextPolicy, options, maximumRetries); } }; } -class RedirectPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, maxRetries = 20) { - super(nextPolicy, options); - this.maxRetries = maxRetries; +var RedirectPolicy = /** @class */ (function (_super) { + tslib.__extends(RedirectPolicy, _super); + function RedirectPolicy(nextPolicy, options, maxRetries) { + if (maxRetries === void 0) { maxRetries = 20; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.maxRetries = maxRetries; + return _this; } - sendRequest(request) { + RedirectPolicy.prototype.sendRequest = function (request) { + var _this = this; return this._nextPolicy .sendRequest(request) - .then((response) => handleRedirect(this, response, 0)); - } -} + .then(function (response) { return handleRedirect(_this, response, 0); }); + }; + return RedirectPolicy; +}(BaseRequestPolicy)); function handleRedirect(policy, response, currentRetries) { - const { request, status } = response; - const locationHeader = response.headers.get("location"); + var request = response.request, status = response.status; + var locationHeader = response.headers.get("location"); if (locationHeader && (status === 300 || (status === 301 && allowedRedirect.includes(request.method)) || @@ -12653,7 +13667,7 @@ function handleRedirect(policy, response, currentRetries) { (status === 303 && request.method === "POST") || status === 307) && (!policy.maxRetries || currentRetries < policy.maxRetries)) { - const builder = URLBuilder.parse(request.url); + var builder = URLBuilder.parse(request.url); 
builder.setPath(locationHeader); request.url = builder.toString(); // POST request with Status code 303 should be converted into a @@ -12664,41 +13678,47 @@ function handleRedirect(policy, response, currentRetries) { } return policy._nextPolicy .sendRequest(request) - .then((res) => handleRedirect(policy, res, currentRetries + 1)); + .then(function (res) { return handleRedirect(policy, res, currentRetries + 1); }); } return Promise.resolve(response); } // Copyright (c) Microsoft Corporation. -function rpRegistrationPolicy(retryTimeout = 30) { +function rpRegistrationPolicy(retryTimeout) { + if (retryTimeout === void 0) { retryTimeout = 30; } return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); } }; } -class RPRegistrationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _retryTimeout = 30) { - super(nextPolicy, options); - this._retryTimeout = _retryTimeout; +var RPRegistrationPolicy = /** @class */ (function (_super) { + tslib.__extends(RPRegistrationPolicy, _super); + function RPRegistrationPolicy(nextPolicy, options, _retryTimeout) { + if (_retryTimeout === void 0) { _retryTimeout = 30; } + var _this = _super.call(this, nextPolicy, options) || this; + _this._retryTimeout = _retryTimeout; + return _this; } - sendRequest(request) { + RPRegistrationPolicy.prototype.sendRequest = function (request) { + var _this = this; return this._nextPolicy .sendRequest(request.clone()) - .then((response) => registerIfNeeded(this, request, response)); - } -} + .then(function (response) { return registerIfNeeded(_this, request, response); }); + }; + return RPRegistrationPolicy; +}(BaseRequestPolicy)); function registerIfNeeded(policy, request, response) { if (response.status === 409) { - const rpName = checkRPNotRegisteredError(response.bodyAsText); + var rpName = checkRPNotRegisteredError(response.bodyAsText); if (rpName) { - const urlPrefix = extractSubscriptionUrl(request.url); + var urlPrefix = extractSubscriptionUrl(request.url); return (registerRP(policy, urlPrefix, rpName, request) // Autoregistration of ${provider} failed for some reason. We will not return this error // instead will return the initial response with 409 status code back to the user. // do nothing here as we are returning the original response at the end of this method. - .catch(() => false) - .then((registrationStatus) => { + .catch(function () { return false; }) + .then(function (registrationStatus) { if (registrationStatus) { // Retry the original request. We have to change the x-ms-client-request-id // otherwise Azure endpoint will return the initial 409 (cached) response. @@ -12717,8 +13737,9 @@ function registerIfNeeded(policy, request, response) { * @param reuseUrlToo - Should the url from the original request be reused as well. Default false. * @returns A new request object with desired headers. */ -function getRequestEssentials(originalRequest, reuseUrlToo = false) { - const reqOptions = originalRequest.clone(); +function getRequestEssentials(originalRequest, reuseUrlToo) { + if (reuseUrlToo === void 0) { reuseUrlToo = false; } + var reqOptions = originalRequest.clone(); if (reuseUrlToo) { reqOptions.url = originalRequest.url; } @@ -12736,7 +13757,7 @@ function getRequestEssentials(originalRequest, reuseUrlToo = false) { * @returns The name of the RP if condition is satisfied else undefined. 
*/ function checkRPNotRegisteredError(body) { - let result, responseBody; + var result, responseBody; if (body) { try { responseBody = JSON.parse(body); @@ -12749,7 +13770,7 @@ function checkRPNotRegisteredError(body) { responseBody.error.message && responseBody.error.code && responseBody.error.code === "MissingSubscriptionRegistration") { - const matchRes = responseBody.error.message.match(/.*'(.*)'/i); + var matchRes = responseBody.error.message.match(/.*'(.*)'/i); if (matchRes) { result = matchRes.pop(); } @@ -12764,13 +13785,13 @@ function checkRPNotRegisteredError(body) { * @returns The url prefix as explained above. */ function extractSubscriptionUrl(url) { - let result; - const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + var result; + var matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); if (matchRes && matchRes[0]) { result = matchRes[0]; } else { - throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); + throw new Error("Unable to extract subscriptionId from the given url - " + url + "."); } return result; } @@ -12781,18 +13802,20 @@ function extractSubscriptionUrl(url) { * @param provider - The provider name to be registered. * @param originalRequest - The original request sent by the user that returned a 409 response * with a message that the provider is not registered. + * @param callback - The callback that handles the RP registration */ -async function registerRP(policy, urlPrefix, provider, originalRequest) { - const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; - const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; - const reqOptions = getRequestEssentials(originalRequest); +function registerRP(policy, urlPrefix, provider, originalRequest) { + var postUrl = urlPrefix + "providers/" + provider + "/register?api-version=2016-02-01"; + var getUrl = urlPrefix + "providers/" + provider + "?api-version=2016-02-01"; + var reqOptions = getRequestEssentials(originalRequest); reqOptions.method = "POST"; reqOptions.url = postUrl; - const response = await policy._nextPolicy.sendRequest(reqOptions); - if (response.status !== 200) { - throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); - } - return getRegistrationStatus(policy, getUrl, originalRequest); + return policy._nextPolicy.sendRequest(reqOptions).then(function (response) { + if (response.status !== 200) { + throw new Error("Autoregistration of " + provider + " failed. Please try registering manually."); + } + return getRegistrationStatus(policy, getUrl, originalRequest); + }); } /** * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. @@ -12803,24 +13826,25 @@ async function registerRP(policy, urlPrefix, provider, originalRequest) { * with a message that the provider is not registered. * @returns True if RP Registration is successful. 
*/ -async function getRegistrationStatus(policy, url, originalRequest) { - const reqOptions = getRequestEssentials(originalRequest); +function getRegistrationStatus(policy, url, originalRequest) { + var reqOptions = getRequestEssentials(originalRequest); reqOptions.url = url; reqOptions.method = "GET"; - const res = await policy._nextPolicy.sendRequest(reqOptions); - const obj = res.parsedBody; - if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { - return true; - } - else { - await delay(policy._retryTimeout * 1000); - return getRegistrationStatus(policy, url, originalRequest); - } + return policy._nextPolicy.sendRequest(reqOptions).then(function (res) { + var obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + return delay(policy._retryTimeout * 1000) + .then(function () { return getRegistrationStatus(policy, url, originalRequest); }); + } + }); } // Copyright (c) Microsoft Corporation. // Default options for the cycler if none are provided -const DEFAULT_CYCLER_OPTIONS = { +var DEFAULT_CYCLER_OPTIONS = { forcedRefreshWindowInMs: 1000, retryIntervalInMs: 3000, refreshWindowInMs: 1000 * 60 * 2 // Start refreshing 2m before expiry @@ -12838,33 +13862,59 @@ const DEFAULT_CYCLER_OPTIONS = { * throwing an exception * @returns - a promise that, if it resolves, will resolve with an access token */ -async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { - // This wrapper handles exceptions gracefully as long as we haven't exceeded - // the timeout. - async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { - try { - return await getAccessToken(); - } - catch (_a) { - return null; - } +function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + return tslib.__awaiter(this, void 0, void 0, function () { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. 
+ function tryGetAccessToken() { + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, finalToken; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!(Date.now() < timeoutInMs)) return [3 /*break*/, 5]; + _b.label = 1; + case 1: + _b.trys.push([1, 3, , 4]); + return [4 /*yield*/, getAccessToken()]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + _a = _b.sent(); + return [2 /*return*/, null]; + case 4: return [3 /*break*/, 7]; + case 5: return [4 /*yield*/, getAccessToken()]; + case 6: + finalToken = _b.sent(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return [2 /*return*/, finalToken]; + case 7: return [2 /*return*/]; + } + }); + }); } - else { - const finalToken = await getAccessToken(); - // Timeout is up, so throw if it's still null - if (finalToken === null) { - throw new Error("Failed to refresh access token."); + var token; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, tryGetAccessToken()]; + case 1: + token = _a.sent(); + _a.label = 2; + case 2: + if (!(token === null)) return [3 /*break*/, 5]; + return [4 /*yield*/, delay(retryIntervalInMs)]; + case 3: + _a.sent(); + return [4 /*yield*/, tryGetAccessToken()]; + case 4: + token = _a.sent(); + return [3 /*break*/, 2]; + case 5: return [2 /*return*/, token]; } - return finalToken; - } - } - let token = await tryGetAccessToken(); - while (token === null) { - await delay(retryIntervalInMs); - token = await tryGetAccessToken(); - } - return token; + }); + }); } /** * Creates a token cycler from a credential, scopes, and optional settings. @@ -12882,14 +13932,15 @@ async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { * @returns - a function that reliably produces a valid access token */ function createTokenCycler(credential, scopes, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + var _this = this; + var refreshWorker = null; + var token = null; + var options = tslib.__assign(tslib.__assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); /** * This little holder defines several predicates that we use to construct * the rules of refreshing the token. */ - const cycler = { + var cycler = { /** * Produces true if a refresh job is currently in progress. */ @@ -12921,18 +13972,20 @@ function createTokenCycler(credential, scopes, tokenCyclerOptions) { var _a; if (!cycler.isRefreshing) { // We bind `scopes` here to avoid passing it around a lot - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + var tryGetAccessToken = function () { + return credential.getToken(scopes, getTokenOptions); + }; // Take advantage of promise chaining to insert an assignment to `token` // before the refresh can be considered done. refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, // If we don't have a token, then we should timeout immediately (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) - .then((_token) => { + .then(function (_token) { refreshWorker = null; token = _token; return token; }) - .catch((reason) => { + .catch(function (reason) { // We also should reset the refresher if we enter a failed state. 
All // existing awaiters will throw, but subsequent requests will start a // new retry chain. @@ -12943,23 +13996,25 @@ function createTokenCycler(credential, scopes, tokenCyclerOptions) { } return refreshWorker; } - return async (tokenOptions) => { - // - // Simple rules: - // - If we MUST refresh, then return the refresh task, blocking - // the pipeline until a token is available. - // - If we SHOULD refresh, then run refresh but don't return it - // (we can still use the cached token). - // - Return the token, since it's fine if we didn't return in - // step 1. - // - if (cycler.mustRefresh) - return refresh(tokenOptions); - if (cycler.shouldRefresh) { - refresh(tokenOptions); - } - return token; - }; + return function (tokenOptions) { return tslib.__awaiter(_this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return [2 /*return*/, refresh(tokenOptions)]; + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return [2 /*return*/, token]; + }); + }); }; } // #endregion /** @@ -12971,28 +14026,36 @@ function createTokenCycler(credential, scopes, tokenCyclerOptions) { */ function bearerTokenAuthenticationPolicy(credential, scopes) { // This simple function encapsulates the entire process of reliably retrieving the token - const getToken = createTokenCycler(credential, scopes /* , options */); - class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const { token } = await getToken({ - abortSignal: webResource.abortSignal, - tracingOptions: { - spanOptions: webResource.spanOptions, - tracingContext: webResource.tracingContext - } + var getToken = createTokenCycler(credential, scopes /* , options */); + var BearerTokenAuthenticationPolicy = /** @class */ (function (_super) { + tslib.__extends(BearerTokenAuthenticationPolicy, _super); + function BearerTokenAuthenticationPolicy(nextPolicy, options) { + return _super.call(this, nextPolicy, options) || this; + } + BearerTokenAuthenticationPolicy.prototype.sendRequest = function (webResource) { + return tslib.__awaiter(this, void 0, void 0, function () { + var token; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, getToken({ + abortSignal: webResource.abortSignal, + tracingOptions: { + spanOptions: webResource.spanOptions, + tracingContext: webResource.tracingContext + } + })]; + case 1: + token = (_a.sent()).token; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, "Bearer " + token); + return [2 /*return*/, this._nextPolicy.sendRequest(webResource)]; + } + }); }); - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - return this._nextPolicy.sendRequest(webResource); - } - } + }; + return BearerTokenAuthenticationPolicy; + }(BaseRequestPolicy)); return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new 
BearerTokenAuthenticationPolicy(nextPolicy, options); } }; @@ -13001,7 +14064,7 @@ function bearerTokenAuthenticationPolicy(credential, scopes) { // Copyright (c) Microsoft Corporation. function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); } }; @@ -13012,55 +14075,69 @@ function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, max * @param minRetryInterval - The minimum retry interval, in milliseconds. * @param maxRetryInterval - The maximum retry interval, in milliseconds. */ -class SystemErrorRetryPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { - super(nextPolicy, options); - this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; - this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; - this.minRetryInterval = isNumber(minRetryInterval) +var SystemErrorRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(SystemErrorRetryPolicy, _super); + function SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = isNumber(minRetryInterval) ? minRetryInterval : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; - this.maxRetryInterval = isNumber(maxRetryInterval) + _this.maxRetryInterval = isNumber(maxRetryInterval) ? 
maxRetryInterval : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; } - sendRequest(request) { + SystemErrorRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; return this._nextPolicy .sendRequest(request.clone()) - .catch((error) => retry$1(this, request, error.response, error)); - } -} -async function retry$1(policy, request, operationResponse, err, retryData) { - retryData = updateRetryData(policy, retryData, err); - function shouldPolicyRetry(_response, error) { - if (error && - error.code && - (error.code === "ETIMEDOUT" || - error.code === "ESOCKETTIMEDOUT" || - error.code === "ECONNREFUSED" || - error.code === "ECONNRESET" || - error.code === "ENOENT")) { - return true; - } - return false; - } - if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) { - // If previous operation ended with an error and the policy allows a retry, do that - try { - await delay(retryData.retryInterval); - return policy._nextPolicy.sendRequest(request.clone()); - } - catch (nestedErr) { - return retry$1(policy, request, operationResponse, nestedErr, retryData); - } - } - else { - if (err) { - // If the operation failed in the end, return all errors instead of just the last one - return Promise.reject(retryData.error); + .catch(function (error) { return retry$1(_this, request, error.response, error); }); + }; + return SystemErrorRetryPolicy; +}(BaseRequestPolicy)); +function retry$1(policy, request, operationResponse, err, retryData) { + return tslib.__awaiter(this, void 0, void 0, function () { + function shouldPolicyRetry(_response, error) { + if (error && + error.code && + (error.code === "ETIMEDOUT" || + error.code === "ESOCKETTIMEDOUT" || + error.code === "ECONNREFUSED" || + error.code === "ECONNRESET" || + error.code === "ENOENT")) { + return true; + } + return false; } - return operationResponse; - } + var nestedErr_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + retryData = updateRetryData(policy, retryData, err); + if (!shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) return [3 /*break*/, 5]; + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, delay(retryData.retryInterval)]; + case 2: + _a.sent(); + return [2 /*return*/, policy._nextPolicy.sendRequest(request.clone())]; + case 3: + nestedErr_1 = _a.sent(); + return [2 /*return*/, retry$1(policy, request, operationResponse, nestedErr_1, retryData)]; + case 4: return [3 /*break*/, 6]; + case 5: + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return [2 /*return*/, Promise.reject(retryData.error)]; + } + return [2 /*return*/, operationResponse]; + case 6: return [2 /*return*/]; + } + }); + }); } // Copyright (c) Microsoft Corporation. @@ -13074,37 +14151,34 @@ async function retry$1(policy, request, operationResponse, err, retryData) { // Copyright (c) Microsoft Corporation. /** - * Stores the patterns specified in NO_PROXY environment variable. * @internal */ -const globalNoProxyList = []; -let noProxyListLoaded = false; -/** A cache of whether a host should bypass the proxy. 
*/ -const globalBypassedMap = new Map(); +var noProxyList = []; +var noProxyListLoaded = false; +var byPassedList = new Map(); function loadEnvironmentProxyValue() { if (!process) { return undefined; } - const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); - const allProxy = getEnvironmentValue(Constants.ALL_PROXY); - const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + var httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + var allProxy = getEnvironmentValue(Constants.ALL_PROXY); + var httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); return httpsProxy || allProxy || httpProxy; } -/** - * Check whether the host of a given `uri` matches any pattern in the no proxy list. - * If there's a match, any request sent to the same host shouldn't have the proxy settings set. - * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 - */ -function isBypassed(uri, noProxyList, bypassedMap) { +// Check whether the host of a given `uri` is in the noProxyList. +// If there's a match, any request sent to the same host won't have the proxy settings set. +// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 +function isBypassed(uri) { if (noProxyList.length === 0) { return false; } - const host = URLBuilder.parse(uri).getHost(); - if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { - return bypassedMap.get(host); + var host = URLBuilder.parse(uri).getHost(); + if (byPassedList.has(host)) { + return byPassedList.get(host); } - let isBypassedFlag = false; - for (const pattern of noProxyList) { + var isBypassedFlag = false; + for (var _i = 0, noProxyList_1 = noProxyList; _i < noProxyList_1.length; _i++) { + var pattern = noProxyList_1[_i]; if (pattern[0] === ".") { // This should match either domain it self or any subdomain or host // .foo.com will match foo.com it self or *.foo.com @@ -13123,20 +14197,20 @@ function isBypassed(uri, noProxyList, bypassedMap) { } } } - bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + byPassedList.set(host, isBypassedFlag); return isBypassedFlag; } /** * @internal */ function loadNoProxy() { - const noProxy = getEnvironmentValue(Constants.NO_PROXY); + var noProxy = getEnvironmentValue(Constants.NO_PROXY); noProxyListLoaded = true; if (noProxy) { return noProxy .split(",") - .map((item) => item.trim()) - .filter((item) => item.length); + .map(function (item) { return item.trim(); }) + .filter(function (item) { return item.length; }); } return []; } @@ -13147,184 +14221,174 @@ function getDefaultProxySettings(proxyUrl) { return undefined; } } - const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); - const parsedUrl = URLBuilder.parse(urlWithoutAuth); - const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; + var _a = extractAuthFromUrl(proxyUrl), username = _a.username, password = _a.password, urlWithoutAuth = _a.urlWithoutAuth; + var parsedUrl = URLBuilder.parse(urlWithoutAuth); + var schema = parsedUrl.getScheme() ? 
parsedUrl.getScheme() + "://" : ""; return { host: schema + parsedUrl.getHost(), port: Number.parseInt(parsedUrl.getPort() || "80"), - username, - password + username: username, + password: password }; } -/** - * A policy that allows one to apply proxy settings to all requests. - * If not passed static settings, they will be retrieved from the HTTPS_PROXY - * or HTTP_PROXY environment variables. - * @param proxySettings - ProxySettings to use on each request. - * @param options - additional settings, for example, custom NO_PROXY patterns - */ -function proxyPolicy(proxySettings, options) { +function proxyPolicy(proxySettings) { if (!proxySettings) { proxySettings = getDefaultProxySettings(); } if (!noProxyListLoaded) { - globalNoProxyList.push(...loadNoProxy()); + noProxyList.push.apply(noProxyList, loadNoProxy()); } return { - create: (nextPolicy, requestPolicyOptions) => { - return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); + create: function (nextPolicy, options) { + return new ProxyPolicy(nextPolicy, options, proxySettings); } }; } function extractAuthFromUrl(url) { - const atIndex = url.indexOf("@"); + var atIndex = url.indexOf("@"); if (atIndex === -1) { return { urlWithoutAuth: url }; } - const schemeIndex = url.indexOf("://"); - const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; - const auth = url.substring(authStart, atIndex); - const colonIndex = auth.indexOf(":"); - const hasPassword = colonIndex !== -1; - const username = hasPassword ? auth.substring(0, colonIndex) : auth; - const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; - const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + var schemeIndex = url.indexOf("://"); + var authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + var auth = url.substring(authStart, atIndex); + var colonIndex = auth.indexOf(":"); + var hasPassword = colonIndex !== -1; + var username = hasPassword ? auth.substring(0, colonIndex) : auth; + var password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + var urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); return { - username, - password, - urlWithoutAuth + username: username, + password: password, + urlWithoutAuth: urlWithoutAuth }; } -class ProxyPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, proxySettings, customNoProxyList) { - super(nextPolicy, options); - this.proxySettings = proxySettings; - this.customNoProxyList = customNoProxyList; +var ProxyPolicy = /** @class */ (function (_super) { + tslib.__extends(ProxyPolicy, _super); + function ProxyPolicy(nextPolicy, options, proxySettings) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.proxySettings = proxySettings; + return _this; } - sendRequest(request) { - var _a; - if (!request.proxySettings && - !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { + ProxyPolicy.prototype.sendRequest = function (request) { + if (!request.proxySettings && !isBypassed(request.url)) { request.proxySettings = this.proxySettings; } return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-/** - * Maximum number of retries for the throttling retry policy - */ -const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; + }; + return ProxyPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. -const StatusCodes = Constants.HttpConstants.StatusCodes; +var StatusCodes = Constants.HttpConstants.StatusCodes; function throttlingRetryPolicy() { return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new ThrottlingRetryPolicy(nextPolicy, options); } }; } -const StandardAbortMessage$1 = "The operation was aborted."; /** * To learn more, please refer to * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors */ -class ThrottlingRetryPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _handleResponse) { - super(nextPolicy, options); - this.numberOfRetries = 0; - this._handleResponse = _handleResponse || this._defaultResponseHandler; - } - async sendRequest(httpRequest) { - const response = await this._nextPolicy.sendRequest(httpRequest.clone()); - if (response.status !== StatusCodes.TooManyRequests && - response.status !== StatusCodes.ServiceUnavailable) { - return response; - } - else { - return this._handleResponse(httpRequest, response); - } +var ThrottlingRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(ThrottlingRetryPolicy, _super); + function ThrottlingRetryPolicy(nextPolicy, options, _handleResponse) { + var _this = _super.call(this, nextPolicy, options) || this; + _this._handleResponse = _handleResponse || _this._defaultResponseHandler; + return _this; } - async _defaultResponseHandler(httpRequest, httpResponse) { - var _a; - const retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); - if (retryAfterHeader) { - const delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); - if (delayInMs) { - this.numberOfRetries += 1; - await delay(delayInMs, undefined, { - abortSignal: httpRequest.abortSignal, - abortErrorMsg: StandardAbortMessage$1 - }); - if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? 
void 0 : _a.aborted) { - throw new abortController.AbortError(StandardAbortMessage$1); - } - if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { - return this.sendRequest(httpRequest); - } - else { - return this._nextPolicy.sendRequest(httpRequest); + ThrottlingRetryPolicy.prototype.sendRequest = function (httpRequest) { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy.sendRequest(httpRequest.clone()).then(function (response) { + if (response.status !== StatusCodes.TooManyRequests) { + return response; + } + else { + return _this._handleResponse(httpRequest, response); + } + })]; + }); + }); + }; + ThrottlingRetryPolicy.prototype._defaultResponseHandler = function (httpRequest, httpResponse) { + return tslib.__awaiter(this, void 0, void 0, function () { + var retryAfterHeader, delayInMs; + var _this = this; + return tslib.__generator(this, function (_a) { + retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (retryAfterHeader) { + delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (delayInMs) { + return [2 /*return*/, delay(delayInMs).then(function (_) { return _this._nextPolicy.sendRequest(httpRequest); })]; + } } - } - } - return httpResponse; - } - static parseRetryAfterHeader(headerValue) { - const retryAfterInSeconds = Number(headerValue); + return [2 /*return*/, httpResponse]; + }); + }); + }; + ThrottlingRetryPolicy.parseRetryAfterHeader = function (headerValue) { + var retryAfterInSeconds = Number(headerValue); if (Number.isNaN(retryAfterInSeconds)) { return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); } else { return retryAfterInSeconds * 1000; } - } - static parseDateRetryAfterHeader(headerValue) { + }; + ThrottlingRetryPolicy.parseDateRetryAfterHeader = function (headerValue) { try { - const now = Date.now(); - const date = Date.parse(headerValue); - const diff = date - now; + var now = Date.now(); + var date = Date.parse(headerValue); + var diff = date - now; return Number.isNaN(diff) ? undefined : diff; } catch (error) { return undefined; } - } -} + }; + return ThrottlingRetryPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. 
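
One detail worth calling out from the ThrottlingRetryPolicy hunk above (illustration only, not part of the diff): parseRetryAfterHeader accepts both Retry-After formats, delta-seconds and an HTTP-date. A standalone TypeScript sketch of that conversion; the helper name retryAfterToMs is hypothetical.

function retryAfterToMs(headerValue: string): number | undefined {
  const seconds = Number(headerValue);
  if (!Number.isNaN(seconds)) {
    // Delta-seconds form, e.g. "Retry-After: 120" -> 120 000 ms
    return seconds * 1000;
  }
  // HTTP-date form, e.g. "Retry-After: Fri, 31 Dec 2021 23:59:59 GMT"
  // -> milliseconds between that date and now (may be negative for past dates)
  const diff = Date.parse(headerValue) - Date.now();
  return Number.isNaN(diff) ? undefined : diff;
}

// In both versions of the policy, a truthy parsed value triggers a delay of that many
// milliseconds before the request is replayed through the pipeline.
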
function signingPolicy(authenticationProvider) { return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new SigningPolicy(nextPolicy, options, authenticationProvider); } }; } -class SigningPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, authenticationProvider) { - super(nextPolicy, options); - this.authenticationProvider = authenticationProvider; +var SigningPolicy = /** @class */ (function (_super) { + tslib.__extends(SigningPolicy, _super); + function SigningPolicy(nextPolicy, options, authenticationProvider) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.authenticationProvider = authenticationProvider; + return _this; } - signRequest(request) { + SigningPolicy.prototype.signRequest = function (request) { return this.authenticationProvider.signRequest(request); - } - sendRequest(request) { - return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); - } -} + }; + SigningPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this.signRequest(request).then(function (nextRequest) { + return _this._nextPolicy.sendRequest(nextRequest); + }); + }; + return SigningPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. -const DefaultKeepAliveOptions = { +var DefaultKeepAliveOptions = { enable: true }; function keepAlivePolicy(keepAliveOptions) { return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); } }; @@ -13332,7 +14396,8 @@ function keepAlivePolicy(keepAliveOptions) { /** * KeepAlivePolicy is a policy used to control keep alive settings for every request. */ -class KeepAlivePolicy extends BaseRequestPolicy { +var KeepAlivePolicy = /** @class */ (function (_super) { + tslib.__extends(KeepAlivePolicy, _super); /** * Creates an instance of KeepAlivePolicy. * @@ -13340,9 +14405,10 @@ class KeepAlivePolicy extends BaseRequestPolicy { * @param options - * @param keepAliveOptions - */ - constructor(nextPolicy, options, keepAliveOptions) { - super(nextPolicy, options); - this.keepAliveOptions = keepAliveOptions; + function KeepAlivePolicy(nextPolicy, options, keepAliveOptions) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.keepAliveOptions = keepAliveOptions; + return _this; } /** * Sends out request. @@ -13350,85 +14416,104 @@ class KeepAlivePolicy extends BaseRequestPolicy { * @param request - * @returns */ - async sendRequest(request) { - request.keepAlive = this.keepAliveOptions.enable; - return this._nextPolicy.sendRequest(request); - } -} + KeepAlivePolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + request.keepAlive = this.keepAliveOptions.enable; + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + }); + }); + }; + return KeepAlivePolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. 
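
Most of the churn in these dist hunks is the same @azure/core-http policy logic re-emitted by an older toolchain: the "-" lines use native class and async/await syntax, while the "+" lines are the ES5 downlevel, where class inheritance becomes tslib.__extends and every async method becomes a tslib.__awaiter / tslib.__generator state machine (a few hunks also show genuine behavioural differences, for example the "-" ThrottlingRetryPolicy additionally retries 503 and honours abort signals). As a rough sketch of the source shape that produces this output; the type imports are assumed names from @azure/core-http's public surface and the policy itself is hypothetical.

import {
  BaseRequestPolicy,
  HttpOperationResponse,
  RequestPolicy,
  RequestPolicyOptions,
  WebResourceLike
} from "@azure/core-http";

// A do-nothing policy written in the modern syntax seen on the "-" lines.
class PassThroughPolicy extends BaseRequestPolicy {
  constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {
    super(nextPolicy, options);
  }

  // Compiling a method like this for an ES5 target with tslib helpers produces
  // output of the tslib.__awaiter / tslib.__generator shape seen on the "+" lines.
  async sendRequest(request: WebResourceLike): Promise<HttpOperationResponse> {
    return this._nextPolicy.sendRequest(request);
  }
}
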
-const createSpan = coreTracing.createSpanFunction({ +var createSpan = coreTracing.createSpanFunction({ packagePrefix: "", namespace: "" }); -function tracingPolicy(tracingOptions = {}) { +function tracingPolicy(tracingOptions) { + if (tracingOptions === void 0) { tracingOptions = {}; } return { - create(nextPolicy, options) { + create: function (nextPolicy, options) { return new TracingPolicy(nextPolicy, options, tracingOptions); } }; } -class TracingPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, tracingOptions) { - super(nextPolicy, options); - this.userAgent = tracingOptions.userAgent; +var TracingPolicy = /** @class */ (function (_super) { + tslib.__extends(TracingPolicy, _super); + function TracingPolicy(nextPolicy, options, tracingOptions) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.userAgent = tracingOptions.userAgent; + return _this; } - async sendRequest(request) { - if (!request.tracingContext) { - return this._nextPolicy.sendRequest(request); - } - // create a new span - const path = URLBuilder.parse(request.url).getPath() || "/"; - const { span } = createSpan(path, { - tracingOptions: { - spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: coreTracing.SpanKind.CLIENT }), - tracingContext: request.tracingContext - } - }); - span.setAttributes({ - "http.method": request.method, - "http.url": request.url, - requestId: request.requestId - }); - if (this.userAgent) { - span.setAttribute("http.user_agent", this.userAgent); - } - try { - // set headers - const spanContext = span.spanContext(); - const traceParentHeader = coreTracing.getTraceParentHeader(spanContext); - if (traceParentHeader && coreTracing.isSpanContextValid(spanContext)) { - request.headers.set("traceparent", traceParentHeader); - const traceState = spanContext.traceState && spanContext.traceState.serialize(); - // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent - if (traceState) { - request.headers.set("tracestate", traceState); - } - } - const response = await this._nextPolicy.sendRequest(request); - span.setAttribute("http.status_code", response.status); - const serviceRequestId = response.headers.get("x-ms-request-id"); - if (serviceRequestId) { - span.setAttribute("serviceRequestId", serviceRequestId); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.OK - }); - return response; - } - catch (err) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: err.message + TracingPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + var path, span, spanContext, traceParentHeader, traceState, response, serviceRequestId, err_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!request.tracingContext) { + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + } + path = URLBuilder.parse(request.url).getPath() || "/"; + span = createSpan(path, { + tracingOptions: { + spanOptions: tslib.__assign(tslib.__assign({}, request.spanOptions), { kind: coreTracing.SpanKind.CLIENT }), + tracingContext: request.tracingContext + } + }).span; + span.setAttributes({ + "http.method": request.method, + "http.url": request.url, + requestId: request.requestId + }); + if (this.userAgent) { + span.setAttribute("http.user_agent", this.userAgent); + } + _a.label = 1; + case 1: + _a.trys.push([1, 3, 4, 5]); + spanContext = span.context(); + traceParentHeader = 
coreTracing.getTraceParentHeader(spanContext); + if (traceParentHeader) { + request.headers.set("traceparent", traceParentHeader); + traceState = spanContext.traceState && spanContext.traceState.serialize(); + // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent + if (traceState) { + request.headers.set("tracestate", traceState); + } + } + return [4 /*yield*/, this._nextPolicy.sendRequest(request)]; + case 2: + response = _a.sent(); + span.setAttribute("http.status_code", response.status); + serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.OK + }); + return [2 /*return*/, response]; + case 3: + err_1 = _a.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: err_1.message + }); + span.setAttribute("http.status_code", err_1.statusCode); + throw err_1; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - span.setAttribute("http.status_code", err.statusCode); - throw err; - } - finally { - span.end(); - } - } -} + }); + }; + return TracingPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. /** @@ -13437,7 +14522,7 @@ class TracingPolicy extends BaseRequestPolicy { */ function disableResponseDecompressionPolicy() { return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new DisableResponseDecompressionPolicy(nextPolicy, options); } }; @@ -13446,7 +14531,8 @@ function disableResponseDecompressionPolicy() { * A policy to disable response decompression according to Accept-Encoding header * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding */ -class DisableResponseDecompressionPolicy extends BaseRequestPolicy { +var DisableResponseDecompressionPolicy = /** @class */ (function (_super) { + tslib.__extends(DisableResponseDecompressionPolicy, _super); /** * Creates an instance of DisableResponseDecompressionPolicy. * @@ -13455,8 +14541,8 @@ class DisableResponseDecompressionPolicy extends BaseRequestPolicy { */ // The parent constructor is protected. /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ - constructor(nextPolicy, options) { - super(nextPolicy, options); + function DisableResponseDecompressionPolicy(nextPolicy, options) { + return _super.call(this, nextPolicy, options) || this; } /** * Sends out request. @@ -13464,16 +14550,21 @@ class DisableResponseDecompressionPolicy extends BaseRequestPolicy { * @param request - * @returns */ - async sendRequest(request) { - request.decompressResponse = false; - return this._nextPolicy.sendRequest(request); - } -} + DisableResponseDecompressionPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + request.decompressResponse = false; + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + }); + }); + }; + return DisableResponseDecompressionPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. 
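For the tracing hunks above: before dispatching a request, the rewritten TracingPolicy serializes the active span context into the W3C traceparent header (and only then tracestate). A small TypeScript sketch of that header construction follows; the SpanContext shape and the setTraceHeaders helper are assumptions for illustration, not the coreTracing API.

// Sketch: build a W3C trace-context "traceparent" value and attach it,
// setting "tracestate" only after "traceparent" is present.
interface SpanContext {
  traceId: string;    // 32 lowercase hex characters
  spanId: string;     // 16 lowercase hex characters
  traceFlags: number; // e.g. 1 when the span is sampled
}

function toTraceParent(ctx: SpanContext): string {
  const flags = ctx.traceFlags.toString(16).padStart(2, "0");
  return `00-${ctx.traceId}-${ctx.spanId}-${flags}`;
}

function setTraceHeaders(headers: Map<string, string>, ctx: SpanContext, traceState?: string): void {
  headers.set("traceparent", toTraceParent(ctx));
  if (traceState) {
    headers.set("tracestate", traceState);
  }
}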
function ndJsonPolicy() { return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new NdJsonPolicy(nextPolicy, options); } }; @@ -13481,30 +14572,37 @@ function ndJsonPolicy() { /** * NdJsonPolicy that formats a JSON array as newline-delimited JSON */ -class NdJsonPolicy extends BaseRequestPolicy { +var NdJsonPolicy = /** @class */ (function (_super) { + tslib.__extends(NdJsonPolicy, _super); /** * Creates an instance of KeepAlivePolicy. */ - constructor(nextPolicy, options) { - super(nextPolicy, options); + function NdJsonPolicy(nextPolicy, options) { + return _super.call(this, nextPolicy, options) || this; } /** * Sends a request. */ - async sendRequest(request) { - // There currently isn't a good way to bypass the serializer - if (typeof request.body === "string" && request.body.startsWith("[")) { - const body = JSON.parse(request.body); - if (Array.isArray(body)) { - request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); - } - } - return this._nextPolicy.sendRequest(request); - } -} + NdJsonPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + var body; + return tslib.__generator(this, function (_a) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map(function (item) { return JSON.stringify(item) + "\n"; }).join(""); + } + } + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + }); + }); + }; + return NdJsonPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. -let cachedHttpClient; +var cachedHttpClient; function getCachedDefaultHttpClient() { if (!cachedHttpClient) { cachedHttpClient = new NodeFetchHttpClient(); @@ -13516,28 +14614,29 @@ function getCachedDefaultHttpClient() { /** * ServiceClient sends service requests and receives responses. */ -class ServiceClient { +var ServiceClient = /** @class */ (function () { /** * The ServiceClient constructor * @param credentials - The credentials used for authentication with the service. * @param options - The service client options that govern the behavior of the client. */ - constructor(credentials, + function ServiceClient(credentials, /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */ options) { + var _this = this; if (!options) { options = {}; } this._withCredentials = options.withCredentials || false; this._httpClient = options.httpClient || getCachedDefaultHttpClient(); this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); - let requestPolicyFactories; + var requestPolicyFactories; if (Array.isArray(options.requestPolicyFactories)) { logger.info("ServiceClient: using custom request policies"); requestPolicyFactories = options.requestPolicyFactories; } else { - let authPolicyFactory = undefined; + var authPolicyFactory = undefined; if (coreAuth.isTokenCredential(credentials)) { logger.info("ServiceClient: creating bearer token authentication policy from provided credentials"); // Create a wrapped RequestPolicyFactory here so that we can provide the @@ -13546,16 +14645,16 @@ class ServiceClient { // implementations do not set baseUri until after ServiceClient's constructor // is finished, leaving baseUri empty at the time when it is needed to // build the correct scope name. 
- const wrappedPolicyFactory = () => { - let bearerTokenPolicyFactory = undefined; + var wrappedPolicyFactory = function () { + var bearerTokenPolicyFactory = undefined; // eslint-disable-next-line @typescript-eslint/no-this-alias - const serviceClient = this; - const serviceClientOptions = options; + var serviceClient = _this; + var serviceClientOptions = options; return { - create(nextPolicy, createOptions) { - const credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri); + create: function (nextPolicy, createOptions) { + var credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri); if (!credentialScopes) { - throw new Error(`When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`); + throw new Error("When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy"); } if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) { bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); @@ -13578,7 +14677,7 @@ class ServiceClient { if (options.requestPolicyFactories) { // options.requestPolicyFactories can also be a function that manipulates // the default requestPolicyFactories array - const newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + var newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); if (newRequestPolicyFactories) { requestPolicyFactories = newRequestPolicyFactories; } @@ -13589,11 +14688,11 @@ class ServiceClient { /** * Send the provided httpRequest. */ - sendRequest(options) { + ServiceClient.prototype.sendRequest = function (options) { if (options === null || options === undefined || typeof options !== "object") { throw new Error("options cannot be null or undefined and it must be of type object."); } - let httpRequest; + var httpRequest; try { if (isWebResourceLike(options)) { options.validateRequestProperties(); @@ -13607,220 +14706,242 @@ class ServiceClient { catch (error) { return Promise.reject(error); } - let httpPipeline = this._httpClient; + var httpPipeline = this._httpClient; if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { - for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + for (var i = this._requestPolicyFactories.length - 1; i >= 0; --i) { httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); } } return httpPipeline.sendRequest(httpRequest); - } + }; /** * Send an HTTP request that is populated using the provided OperationSpec. * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. * @param operationSpec - The OperationSpec to use to populate the httpRequest. * @param callback - The callback to call when the response is received. */ - async sendOperationRequest(operationArguments, operationSpec, callback) { + ServiceClient.prototype.sendOperationRequest = function (operationArguments, operationSpec, callback) { var _a; - if (typeof operationArguments.options === "function") { - callback = operationArguments.options; - operationArguments.options = undefined; - } - const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; - const httpRequest = new WebResource(); - let result; - try { - const baseUri = operationSpec.baseUrl || this.baseUri; - if (!baseUri) { - throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); - } - httpRequest.method = operationSpec.httpMethod; - httpRequest.operationSpec = operationSpec; - const requestUrl = URLBuilder.parse(baseUri); - if (operationSpec.path) { - requestUrl.appendPath(operationSpec.path); - } - if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { - for (const urlParameter of operationSpec.urlParameters) { - let urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); - urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions); - if (!urlParameter.skipEncoding) { - urlParameterValue = encodeURIComponent(urlParameterValue); - } - requestUrl.replaceAll(`{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`, urlParameterValue); - } - } - if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { - for (const queryParameter of operationSpec.queryParameters) { - let queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); - if (queryParameterValue !== undefined && queryParameterValue !== null) { - queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions); - if (queryParameter.collectionFormat !== undefined && - queryParameter.collectionFormat !== null) { - if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { - if (queryParameterValue.length === 0) { - // The collection is empty, no need to try serializing the current queryParam - continue; + return tslib.__awaiter(this, void 0, void 0, function () { + var serializerOptions, httpRequest, result, baseUri, requestUrl, _i, _b, urlParameter, urlParameterValue, _c, _d, queryParameter, queryParameterValue, index, item, index, contentType, _e, _f, headerParameter, headerValue, headerCollectionPrefix, _g, _h, key, options, customHeaderName, rawResponse, sendRequestError, error_1, error_2, cb; + return tslib.__generator(this, function (_j) { + switch (_j.label) { + case 0: + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; + httpRequest = new WebResource(); + _j.label = 1; + case 1: + _j.trys.push([1, 6, , 7]); + baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (_i = 0, _b = operationSpec.urlParameters; _i < _b.length; _i++) { + urlParameter = _b[_i]; + urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); } - else { - for (const index in queryParameterValue) { - const item = queryParameterValue[index]; - queryParameterValue[index] = - item === undefined || item === null ? "" : item.toString(); + requestUrl.replaceAll("{" + (urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)) + "}", urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (_c = 0, _d = operationSpec.queryParameters; _c < _d.length; _c++) { + queryParameter = _d[_c]; + queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue !== undefined && queryParameterValue !== null) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions); + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null) { + if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + // The collection is empty, no need to try serializing the current queryParam + continue; + } + else { + for (index in queryParameterValue) { + item = queryParameterValue[index]; + queryParameterValue[index] = + item === undefined || item === null ? 
"" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); } } - else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || - queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { - queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); - } } - if (!queryParameter.skipEncoding) { - if (Array.isArray(queryParameterValue)) { - for (const index in queryParameterValue) { - if (queryParameterValue[index] !== undefined && - queryParameterValue[index] !== null) { - queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + httpRequest.url = requestUrl.toString(); + contentType = operationSpec.contentType || this.requestContentType; + if (contentType && operationSpec.requestBody) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (_e = 0, _f = operationSpec.headerParameters; _e < _f.length; _e++) { + headerParameter = _f[_e]; + headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue !== undefined && headerValue !== null) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions); + headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (_g = 0, _h = Object.keys(headerValue); _g < _h.length; _g++) { + key = _h[_g]; + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); } } } - else { - queryParameterValue = encodeURIComponent(queryParameterValue); + } + options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + 
} + if (options.spanOptions) { + httpRequest.spanOptions = options.spanOptions; + } + if (options.tracingContext) { + httpRequest.tracingContext = options.tracingContext; + } + if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) { + httpRequest.shouldDeserialize = options.shouldDeserialize; } } - if (queryParameter.collectionFormat !== undefined && - queryParameter.collectionFormat !== null && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { - queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseStatusCodes === undefined) { + httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec); } - requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); - } - } - } - httpRequest.url = requestUrl.toString(); - const contentType = operationSpec.contentType || this.requestContentType; - if (contentType && operationSpec.requestBody) { - httpRequest.headers.set("Content-Type", contentType); - } - if (operationSpec.headerParameters) { - for (const headerParameter of operationSpec.headerParameters) { - let headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); - if (headerValue !== undefined && headerValue !== null) { - headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions); - const headerCollectionPrefix = headerParameter.mapper - .headerCollectionPrefix; - if (headerCollectionPrefix) { - for (const key of Object.keys(headerValue)) { - httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + rawResponse = void 0; + sendRequestError = void 0; + _j.label = 2; + case 2: + _j.trys.push([2, 4, , 5]); + return [4 /*yield*/, this.sendRequest(httpRequest)]; + case 3: + rawResponse = _j.sent(); + return [3 /*break*/, 5]; + case 4: + error_1 = _j.sent(); + sendRequestError = error_1; + return [3 /*break*/, 5]; + case 5: + if (sendRequestError) { + if (sendRequestError.response) { + sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] || + operationSpec.responses["default"]); } + result = Promise.reject(sendRequestError); } else { - httpRequest.headers.set(headerParameter.mapper.serializedName || - getPathStringFromParameter(headerParameter), headerValue); + result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status])); } - } - } - } - const options = operationArguments.options; - if (options) { - if (options.customHeaders) { - for (const customHeaderName in options.customHeaders) { - httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); - } - } - if (options.abortSignal) { - httpRequest.abortSignal = options.abortSignal; - } - if (options.timeout) { - httpRequest.timeout = options.timeout; - } - if (options.onUploadProgress) { - httpRequest.onUploadProgress = options.onUploadProgress; - } - if (options.onDownloadProgress) { - httpRequest.onDownloadProgress = options.onDownloadProgress; - } - if 
(options.spanOptions) { - httpRequest.spanOptions = options.spanOptions; - } - if (options.tracingContext) { - httpRequest.tracingContext = options.tracingContext; - } - if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) { - httpRequest.shouldDeserialize = options.shouldDeserialize; - } - } - httpRequest.withCredentials = this._withCredentials; - serializeRequestBody(this, httpRequest, operationArguments, operationSpec); - if (httpRequest.streamResponseStatusCodes === undefined) { - httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec); - } - let rawResponse; - let sendRequestError; - try { - rawResponse = await this.sendRequest(httpRequest); - } - catch (error) { - sendRequestError = error; - } - if (sendRequestError) { - if (sendRequestError.response) { - sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] || - operationSpec.responses["default"]); + return [3 /*break*/, 7]; + case 6: + error_2 = _j.sent(); + result = Promise.reject(error_2); + return [3 /*break*/, 7]; + case 7: + cb = callback; + if (cb) { + result + .then(function (res) { return cb(null, res._response.parsedBody, res._response.request, res._response); }) + .catch(function (err) { return cb(err); }); + } + return [2 /*return*/, result]; } - result = Promise.reject(sendRequestError); - } - else { - result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status])); - } - } - catch (error) { - result = Promise.reject(error); - } - const cb = callback; - if (cb) { - result - .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) - .catch((err) => cb(err)); - } - return result; - } -} + }); + }); + }; + return ServiceClient; +}()); function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { var _a, _b, _c, _d, _e, _f; - const serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {}; - const updatedOptions = { + var serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {}; + var updatedOptions = { rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? 
_e : XML_CHARKEY }; - const xmlCharKey = serializerOptions.xmlCharKey; + var xmlCharKey = serializerOptions.xmlCharKey; if (operationSpec.requestBody && operationSpec.requestBody.mapper) { httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); - const bodyMapper = operationSpec.requestBody.mapper; - const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } = bodyMapper; - const typeName = bodyMapper.type.name; + var bodyMapper = operationSpec.requestBody.mapper; + var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName, xmlNamespace = bodyMapper.xmlNamespace, xmlNamespacePrefix = bodyMapper.xmlNamespacePrefix; + var typeName = bodyMapper.type.name; try { if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) { - const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + var requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString, updatedOptions); - const isStream = typeName === MapperType.Stream; + var isStream = typeName === MapperType.Stream; if (operationSpec.isXML) { - const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; - const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions); + var xmlnsKey = xmlNamespacePrefix ? "xmlns:" + xmlNamespacePrefix : "xmlns"; + var value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions); if (typeName === MapperType.Sequence) { httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, - xmlCharKey + xmlCharKey: xmlCharKey }); } else if (!isStream) { httpRequest.body = stringifyXML(value, { rootName: xmlName || serializedName, - xmlCharKey + xmlCharKey: xmlCharKey }); } } @@ -13836,15 +14957,16 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op } } catch (error) { - throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); + throw new Error("Error \"" + error.message + "\" occurred in serializing the payload - " + JSON.stringify(serializedName, undefined, " ") + "."); } } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { httpRequest.formData = {}; - for (const formDataParameter of operationSpec.formDataParameters) { - const formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + for (var _i = 0, _g = operationSpec.formDataParameters; _i < _g.length; _i++) { + var formDataParameter = _g[_i]; + var formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); if (formDataParameterValue !== undefined && formDataParameterValue !== null) { - const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + var formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); 
httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); } } @@ -13854,18 +14976,19 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself */ function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { + var _a; // Composite and Sequence schemas already got their root namespace set during serialization // We just need to add xmlns to the other schema types if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { - const result = {}; + var result = {}; result[options.xmlCharKey] = serializedValue; - result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + result[XML_ATTRKEY] = (_a = {}, _a[xmlnsKey] = xmlNamespace, _a); return result; } return serializedValue; } function getValueOrFunctionResult(value, defaultValueCreator) { - let result; + var result; if (typeof value === "string") { result = value; } @@ -13878,15 +15001,15 @@ function getValueOrFunctionResult(value, defaultValueCreator) { return result; } function createDefaultRequestPolicyFactories(authPolicyFactory, options) { - const factories = []; + var factories = []; if (options.generateClientRequestIdHeader) { factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); } if (authPolicyFactory) { factories.push(authPolicyFactory); } - const userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); - const userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + var userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + var userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); if (userAgentHeaderName && userAgentHeaderValue) { factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); } @@ -13898,37 +15021,37 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) { factories.push(throttlingRetryPolicy()); } factories.push(deserializationPolicy(options.deserializationContentTypes)); - if (isNode) { + { factories.push(proxyPolicy(options.proxySettings)); } factories.push(logPolicy({ logger: logger.info })); return factories; } function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { - const requestPolicyFactories = []; + var requestPolicyFactories = []; if (pipelineOptions.sendStreamingJson) { requestPolicyFactories.push(ndJsonPolicy()); } - let userAgentValue = undefined; + var userAgentValue = undefined; if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) { - const userAgentInfo = []; + var userAgentInfo = []; userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix); // Add the default user agent value if it isn't already specified // by the userAgentPrefix option. 
- const defaultUserAgentInfo = getDefaultUserAgentValue(); + var defaultUserAgentInfo = getDefaultUserAgentValue(); if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) { userAgentInfo.push(defaultUserAgentInfo); } userAgentValue = userAgentInfo.join(" "); } - const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); - const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); - const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); - if (isNode) { + var keepAliveOptions = tslib.__assign(tslib.__assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); + var retryOptions = tslib.__assign(tslib.__assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); + var redirectOptions = tslib.__assign(tslib.__assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); + { requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); } - const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); - const loggingOptions = Object.assign({}, pipelineOptions.loggingOptions); + var deserializationOptions = tslib.__assign(tslib.__assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); + var loggingOptions = tslib.__assign({}, pipelineOptions.loggingOptions); requestPolicyFactories.push(tracingPolicy({ userAgent: userAgentValue }), keepAlivePolicy(keepAliveOptions), userAgentPolicy({ value: userAgentValue }), generateClientRequestIdPolicy(), deserializationPolicy(deserializationOptions.expectedContentTypes), throttlingRetryPolicy(), systemErrorRetryPolicy(), exponentialRetryPolicy(retryOptions.maxRetries, retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs)); if (redirectOptions.handleRedirects) { requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries)); @@ -13942,7 +15065,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { } return { httpClient: pipelineOptions.httpClient, - requestPolicyFactories + requestPolicyFactories: requestPolicyFactories }; } function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { @@ -13950,22 +15073,22 @@ function getOperationArgumentValueFromParameter(serviceClient, operationArgument } function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { var _a; - let value; + var value; if (typeof parameterPath === "string") { parameterPath = [parameterPath]; } - const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; + var serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; if (Array.isArray(parameterPath)) { if (parameterPath.length > 0) { if (parameterMapper.isConstant) { value = parameterMapper.defaultValue; } else { - let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + var propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); if (!propertySearchResult.propertyFound) { propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); } - let useDefaultValue = false; + var useDefaultValue = false; if (!propertySearchResult.propertyFound) { useDefaultValue = parameterMapper.required || @@ -13974,7 +15097,7 @@ function getOperationArgumentValueFromParameterPath(serviceClient, operationArgu value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; } // Serialize just for validation purposes. - const parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + var parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions); } } @@ -13982,12 +15105,12 @@ function getOperationArgumentValueFromParameterPath(serviceClient, operationArgu if (parameterMapper.required) { value = {}; } - for (const propertyName in parameterPath) { - const propertyMapper = parameterMapper.type.modelProperties[propertyName]; - const propertyPath = parameterPath[propertyName]; - const propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + for (var propertyName in parameterPath) { + var propertyMapper = parameterMapper.type.modelProperties[propertyName]; + var propertyPath = parameterPath[propertyName]; + var propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); // Serialize just for validation purposes. - const propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + var propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions); if (propertyValue !== undefined && propertyValue !== null) { if (!value) { @@ -14000,10 +15123,10 @@ function getOperationArgumentValueFromParameterPath(serviceClient, operationArgu return value; } function getPropertyFromParameterPath(parent, parameterPath) { - const result = { propertyFound: false }; - let i = 0; + var result = { propertyFound: false }; + var i = 0; for (; i < parameterPath.length; ++i) { - const parameterPathPart = parameterPath[i]; + var parameterPathPart = parameterPath[i]; // Make sure to check inherited properties too, so don't use hasOwnProperty(). 
if (parent !== undefined && parent !== null && parameterPathPart in parent) { parent = parent[parameterPathPart]; @@ -14019,29 +15142,31 @@ function getPropertyFromParameterPath(parent, parameterPath) { return result; } function flattenResponse(_response, responseSpec) { - const parsedHeaders = _response.parsedHeaders; - const bodyMapper = responseSpec && responseSpec.bodyMapper; - const addOperationResponse = (obj) => { + var parsedHeaders = _response.parsedHeaders; + var bodyMapper = responseSpec && responseSpec.bodyMapper; + var addOperationResponse = function (obj) { return Object.defineProperty(obj, "_response", { value: _response }); }; if (bodyMapper) { - const typeName = bodyMapper.type.name; + var typeName = bodyMapper.type.name; if (typeName === "Stream") { - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); } - const modelProperties = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; - const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + var modelProperties_1 = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + var isPageableResponse = Object.keys(modelProperties_1).some(function (k) { return modelProperties_1[k].serializedName === ""; }); if (typeName === "Sequence" || isPageableResponse) { - const arrayResponse = [...(_response.parsedBody || [])]; - for (const key of Object.keys(modelProperties)) { - if (modelProperties[key].serializedName) { + var arrayResponse = tslib.__spreadArray([], (_response.parsedBody || [])); + for (var _i = 0, _a = Object.keys(modelProperties_1); _i < _a.length; _i++) { + var key = _a[_i]; + if (modelProperties_1[key].serializedName) { arrayResponse[key] = _response.parsedBody[key]; } } if (parsedHeaders) { - for (const key of Object.keys(parsedHeaders)) { + for (var _b = 0, _c = Object.keys(parsedHeaders); _b < _c.length; _b++) { + var key = _c[_b]; arrayResponse[key] = parsedHeaders[key]; } } @@ -14049,26 +15174,26 @@ function flattenResponse(_response, responseSpec) { return arrayResponse; } if (typeName === "Composite" || typeName === "Dictionary") { - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), _response.parsedBody)); } } if (bodyMapper || _response.request.method === "HEAD" || isPrimitiveType(_response.parsedBody)) { // primitive body types and HEAD booleans - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { body: _response.parsedBody })); + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), { body: _response.parsedBody })); } - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), _response.parsedBody)); } function getCredentialScopes(options, baseUri) { if (options === null || options === void 0 ? void 0 : options.credentialScopes) { - const scopes = options.credentialScopes; + var scopes = options.credentialScopes; return Array.isArray(scopes) - ? scopes.map((scope) => new url.URL(scope).toString()) + ? 
scopes.map(function (scope) { return new url.URL(scope).toString(); }) : new url.URL(scopes).toString(); } if (baseUri) { - return `${baseUri}/.default`; + return baseUri + "/.default"; } return undefined; } @@ -14092,7 +15217,7 @@ function createSpanFunction(args) { /** * Defines the default token refresh buffer duration. */ -const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes +var TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes /** * Provides an {@link AccessTokenCache} implementation which clears * the cached {@link AccessToken}'s after the expiresOnTimestamp has @@ -14100,36 +15225,38 @@ const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes * * @deprecated No longer used in the bearer authorization policy. */ -class ExpiringAccessTokenCache { +var ExpiringAccessTokenCache = /** @class */ (function () { /** * Constructs an instance of {@link ExpiringAccessTokenCache} with * an optional expiration buffer time. */ - constructor(tokenRefreshBufferMs = TokenRefreshBufferMs) { + function ExpiringAccessTokenCache(tokenRefreshBufferMs) { + if (tokenRefreshBufferMs === void 0) { tokenRefreshBufferMs = TokenRefreshBufferMs; } this.cachedToken = undefined; this.tokenRefreshBufferMs = tokenRefreshBufferMs; } - setCachedToken(accessToken) { + ExpiringAccessTokenCache.prototype.setCachedToken = function (accessToken) { this.cachedToken = accessToken; - } - getCachedToken() { + }; + ExpiringAccessTokenCache.prototype.getCachedToken = function () { if (this.cachedToken && Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { this.cachedToken = undefined; } return this.cachedToken; - } -} + }; + return ExpiringAccessTokenCache; +}()); // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. * * @deprecated No longer used in the bearer authorization policy. */ -class AccessTokenRefresher { - constructor(credential, scopes, requiredMillisecondsBeforeNewRefresh = 30000) { +var AccessTokenRefresher = /** @class */ (function () { + function AccessTokenRefresher(credential, scopes, requiredMillisecondsBeforeNewRefresh) { + if (requiredMillisecondsBeforeNewRefresh === void 0) { requiredMillisecondsBeforeNewRefresh = 30000; } this.credential = credential; this.scopes = scopes; this.requiredMillisecondsBeforeNewRefresh = requiredMillisecondsBeforeNewRefresh; @@ -14139,38 +15266,49 @@ class AccessTokenRefresher { * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying * that we are ready for a new refresh. */ - isReady() { + AccessTokenRefresher.prototype.isReady = function () { // We're only ready for a new refresh if the required milliseconds have passed. return (!this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh); - } + }; /** * Stores the time in which it is called, * then requests a new token, * then sets this.promise to undefined, * then returns the token. 
*/ - async getToken(options) { - this.lastCalled = Date.now(); - const token = await this.credential.getToken(this.scopes, options); - this.promise = undefined; - return token || undefined; - } + AccessTokenRefresher.prototype.getToken = function (options) { + return tslib.__awaiter(this, void 0, void 0, function () { + var token; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + this.lastCalled = Date.now(); + return [4 /*yield*/, this.credential.getToken(this.scopes, options)]; + case 1: + token = _a.sent(); + this.promise = undefined; + return [2 /*return*/, token || undefined]; + } + }); + }); + }; /** * Requests a new token if we're not currently waiting for a new token. * Returns null if the required time between each call hasn't been reached. */ - refresh(options) { + AccessTokenRefresher.prototype.refresh = function (options) { if (!this.promise) { this.promise = this.getToken(options); } return this.promise; - } -} + }; + return AccessTokenRefresher; +}()); // Copyright (c) Microsoft Corporation. -const HeaderConstants = Constants.HeaderConstants; -const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; -class BasicAuthenticationCredentials { +var HeaderConstants = Constants.HeaderConstants; +var DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +var BasicAuthenticationCredentials = /** @class */ (function () { /** * Creates a new BasicAuthenticationCredentials object. * @@ -14178,7 +15316,8 @@ class BasicAuthenticationCredentials { * @param password - Password. * @param authorizationScheme - The authorization scheme. */ - constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { + function BasicAuthenticationCredentials(userName, password, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; } this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { throw new Error("userName cannot be null or undefined and must be of type string."); @@ -14196,27 +15335,28 @@ class BasicAuthenticationCredentials { * @param webResource - The WebResourceLike to be signed. * @returns The signed request object. */ - signRequest(webResource) { - const credentials = `${this.userName}:${this.password}`; - const encodedCredentials = `${this.authorizationScheme} ${encodeString(credentials)}`; + BasicAuthenticationCredentials.prototype.signRequest = function (webResource) { + var credentials = this.userName + ":" + this.password; + var encodedCredentials = this.authorizationScheme + " " + encodeString(credentials); if (!webResource.headers) webResource.headers = new HttpHeaders(); webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); return Promise.resolve(webResource); - } -} + }; + return BasicAuthenticationCredentials; +}()); // Copyright (c) Microsoft Corporation. /** * Authenticates to a service using an API key. */ -class ApiKeyCredentials { +var ApiKeyCredentials = /** @class */ (function () { /** * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. */ - constructor(options) { + function ApiKeyCredentials(options) { if (!options || (options && !options.inHeader && !options.inQuery)) { - throw new Error(`options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.`); + throw new Error("options cannot be null or undefined. 
Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided."); } this.inHeader = options.inHeader; this.inQuery = options.inQuery; @@ -14227,55 +15367,60 @@ class ApiKeyCredentials { * @param webResource - The WebResourceLike to be signed. * @returns The signed request object. */ - signRequest(webResource) { + ApiKeyCredentials.prototype.signRequest = function (webResource) { if (!webResource) { - return Promise.reject(new Error(`webResource cannot be null or undefined and must be of type "object".`)); + return Promise.reject(new Error("webResource cannot be null or undefined and must be of type \"object\".")); } if (this.inHeader) { if (!webResource.headers) { webResource.headers = new HttpHeaders(); } - for (const headerName in this.inHeader) { + for (var headerName in this.inHeader) { webResource.headers.set(headerName, this.inHeader[headerName]); } } if (this.inQuery) { if (!webResource.url) { - return Promise.reject(new Error(`url cannot be null in the request object.`)); + return Promise.reject(new Error("url cannot be null in the request object.")); } if (webResource.url.indexOf("?") < 0) { webResource.url += "?"; } - for (const key in this.inQuery) { + for (var key in this.inQuery) { if (!webResource.url.endsWith("?")) { webResource.url += "&"; } - webResource.url += `${key}=${this.inQuery[key]}`; + webResource.url += key + "=" + this.inQuery[key]; } } return Promise.resolve(webResource); - } -} + }; + return ApiKeyCredentials; +}()); // Copyright (c) Microsoft Corporation. -class TopicCredentials extends ApiKeyCredentials { +var TopicCredentials = /** @class */ (function (_super) { + tslib.__extends(TopicCredentials, _super); /** * Creates a new EventGrid TopicCredentials object. * * @param topicKey - The EventGrid topic key */ - constructor(topicKey) { + function TopicCredentials(topicKey) { + var _this = this; if (!topicKey || (topicKey && typeof topicKey !== "string")) { throw new Error("topicKey cannot be null or undefined and must be of type string."); } - const options = { + var options = { inHeader: { "aeg-sas-key": topicKey } }; - super(options); + _this = _super.call(this, options) || this; + return _this; } -} + return TopicCredentials; +}(ApiKeyCredentials)); Object.defineProperty(exports, 'isTokenCredential', { enumerable: true, @@ -14864,92 +16009,7 @@ module.exports = require("assert"); /* 359 */, /* 360 */, /* 361 */, -/* 362 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DiagComponentLogger = void 0; -var global_utils_1 = __webpack_require__(525); -/** - * Component Logger which is meant to be used as part of any component which - * will add automatically additional namespace in front of the log message. 
- * It will then forward all message to global diag logger
- * @example
- * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' });
- * cLogger.debug('test');
- * // @opentelemetry/instrumentation-http test
- */
-var DiagComponentLogger = /** @class */ (function () {
-    function DiagComponentLogger(props) {
-        this._namespace = props.namespace || 'DiagComponentLogger';
-    }
-    DiagComponentLogger.prototype.debug = function () {
-        var args = [];
-        for (var _i = 0; _i < arguments.length; _i++) {
-            args[_i] = arguments[_i];
-        }
-        return logProxy('debug', this._namespace, args);
-    };
-    DiagComponentLogger.prototype.error = function () {
-        var args = [];
-        for (var _i = 0; _i < arguments.length; _i++) {
-            args[_i] = arguments[_i];
-        }
-        return logProxy('error', this._namespace, args);
-    };
-    DiagComponentLogger.prototype.info = function () {
-        var args = [];
-        for (var _i = 0; _i < arguments.length; _i++) {
-            args[_i] = arguments[_i];
-        }
-        return logProxy('info', this._namespace, args);
-    };
-    DiagComponentLogger.prototype.warn = function () {
-        var args = [];
-        for (var _i = 0; _i < arguments.length; _i++) {
-            args[_i] = arguments[_i];
-        }
-        return logProxy('warn', this._namespace, args);
-    };
-    DiagComponentLogger.prototype.verbose = function () {
-        var args = [];
-        for (var _i = 0; _i < arguments.length; _i++) {
-            args[_i] = arguments[_i];
-        }
-        return logProxy('verbose', this._namespace, args);
-    };
-    return DiagComponentLogger;
-}());
-exports.DiagComponentLogger = DiagComponentLogger;
-function logProxy(funcName, namespace, args) {
-    var logger = global_utils_1.getGlobal('diag');
-    // shortcut if logger not set
-    if (!logger) {
-        return;
-    }
-    args.unshift(namespace);
-    return logger[funcName].apply(logger, args);
-}
-//# sourceMappingURL=ComponentLogger.js.map
-
-/***/ }),
+/* 362 */,
 /* 363 */,
 /* 364 */,
 /* 365 */,
@@ -14989,7 +16049,7 @@ var util = __webpack_require__(669);
 * Code generated by Microsoft (R) AutoRest Code Generator.
 * Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/ -const BlobServiceProperties = { +var BlobServiceProperties = { serializedName: "BlobServiceProperties", xmlName: "StorageServiceProperties", type: { @@ -15061,7 +16121,7 @@ const BlobServiceProperties = { } } }; -const Logging = { +var Logging = { serializedName: "Logging", type: { name: "Composite", @@ -15110,7 +16170,7 @@ const Logging = { } } }; -const RetentionPolicy = { +var RetentionPolicy = { serializedName: "RetentionPolicy", type: { name: "Composite", @@ -15137,7 +16197,7 @@ const RetentionPolicy = { } } }; -const Metrics = { +var Metrics = { serializedName: "Metrics", type: { name: "Composite", @@ -15176,7 +16236,7 @@ const Metrics = { } } }; -const CorsRule = { +var CorsRule = { serializedName: "CorsRule", type: { name: "Composite", @@ -15228,7 +16288,7 @@ const CorsRule = { } } }; -const StaticWebsite = { +var StaticWebsite = { serializedName: "StaticWebsite", type: { name: "Composite", @@ -15266,7 +16326,7 @@ const StaticWebsite = { } } }; -const StorageError = { +var StorageError = { serializedName: "StorageError", type: { name: "Composite", @@ -15289,7 +16349,7 @@ const StorageError = { } } }; -const BlobServiceStatistics = { +var BlobServiceStatistics = { serializedName: "BlobServiceStatistics", xmlName: "StorageServiceStats", type: { @@ -15307,7 +16367,7 @@ const BlobServiceStatistics = { } } }; -const GeoReplication = { +var GeoReplication = { serializedName: "GeoReplication", type: { name: "Composite", @@ -15333,7 +16393,7 @@ const GeoReplication = { } } }; -const ListContainersSegmentResponse = { +var ListContainersSegmentResponse = { serializedName: "ListContainersSegmentResponse", xmlName: "EnumerationResults", type: { @@ -15396,7 +16456,7 @@ const ListContainersSegmentResponse = { } } }; -const ContainerItem = { +var ContainerItem = { serializedName: "ContainerItem", xmlName: "Container", type: { @@ -15444,7 +16504,7 @@ const ContainerItem = { } } }; -const ContainerProperties = { +var ContainerProperties = { serializedName: "ContainerProperties", type: { name: "Composite", @@ -15549,7 +16609,7 @@ const ContainerProperties = { } } }; -const KeyInfo = { +var KeyInfo = { serializedName: "KeyInfo", type: { name: "Composite", @@ -15574,7 +16634,7 @@ const KeyInfo = { } } }; -const UserDelegationKey = { +var UserDelegationKey = { serializedName: "UserDelegationKey", type: { name: "Composite", @@ -15639,7 +16699,7 @@ const UserDelegationKey = { } } }; -const FilterBlobSegment = { +var FilterBlobSegment = { serializedName: "FilterBlobSegment", xmlName: "EnumerationResults", type: { @@ -15689,7 +16749,7 @@ const FilterBlobSegment = { } } }; -const FilterBlobItem = { +var FilterBlobItem = { serializedName: "FilterBlobItem", xmlName: "Blob", type: { @@ -15723,7 +16783,7 @@ const FilterBlobItem = { } } }; -const BlobTags = { +var BlobTags = { serializedName: "BlobTags", xmlName: "Tags", type: { @@ -15749,7 +16809,7 @@ const BlobTags = { } } }; -const BlobTag = { +var BlobTag = { serializedName: "BlobTag", xmlName: "Tag", type: { @@ -15775,7 +16835,7 @@ const BlobTag = { } } }; -const SignedIdentifier = { +var SignedIdentifier = { serializedName: "SignedIdentifier", xmlName: "SignedIdentifier", type: { @@ -15801,7 +16861,7 @@ const SignedIdentifier = { } } }; -const AccessPolicy = { +var AccessPolicy = { serializedName: "AccessPolicy", type: { name: "Composite", @@ -15831,7 +16891,7 @@ const AccessPolicy = { } } }; -const ListBlobsFlatSegmentResponse = { +var ListBlobsFlatSegmentResponse = { serializedName: "ListBlobsFlatSegmentResponse", xmlName: "EnumerationResults", type: 
{ @@ -15895,7 +16955,7 @@ const ListBlobsFlatSegmentResponse = { } } }; -const BlobFlatListSegment = { +var BlobFlatListSegment = { serializedName: "BlobFlatListSegment", xmlName: "Blobs", type: { @@ -15920,7 +16980,7 @@ const BlobFlatListSegment = { } } }; -const BlobItemInternal = { +var BlobItemInternal = { serializedName: "BlobItemInternal", xmlName: "Blob", type: { @@ -16000,7 +17060,7 @@ const BlobItemInternal = { } } }; -const BlobPropertiesInternal = { +var BlobPropertiesInternal = { serializedName: "BlobPropertiesInternal", xmlName: "Properties", type: { @@ -16303,7 +17363,7 @@ const BlobPropertiesInternal = { } } }; -const ListBlobsHierarchySegmentResponse = { +var ListBlobsHierarchySegmentResponse = { serializedName: "ListBlobsHierarchySegmentResponse", xmlName: "EnumerationResults", type: { @@ -16374,7 +17434,7 @@ const ListBlobsHierarchySegmentResponse = { } } }; -const BlobHierarchyListSegment = { +var BlobHierarchyListSegment = { serializedName: "BlobHierarchyListSegment", xmlName: "Blobs", type: { @@ -16413,7 +17473,7 @@ const BlobHierarchyListSegment = { } } }; -const BlobPrefix = { +var BlobPrefix = { serializedName: "BlobPrefix", type: { name: "Composite", @@ -16430,7 +17490,7 @@ const BlobPrefix = { } } }; -const DataLakeStorageError = { +var DataLakeStorageError = { serializedName: "DataLakeStorageError", type: { name: "Composite", @@ -16447,7 +17507,7 @@ const DataLakeStorageError = { } } }; -const DataLakeStorageErrorError = { +var DataLakeStorageErrorError = { serializedName: "DataLakeStorageErrorError", type: { name: "Composite", @@ -16470,7 +17530,7 @@ const DataLakeStorageErrorError = { } } }; -const BlockLookupList = { +var BlockLookupList = { serializedName: "BlockLookupList", xmlName: "BlockList", type: { @@ -16519,7 +17579,7 @@ const BlockLookupList = { } } }; -const BlockList = { +var BlockList = { serializedName: "BlockList", type: { name: "Composite", @@ -16558,7 +17618,7 @@ const BlockList = { } } }; -const Block = { +var Block = { serializedName: "Block", type: { name: "Composite", @@ -16583,7 +17643,7 @@ const Block = { } } }; -const PageList = { +var PageList = { serializedName: "PageList", type: { name: "Composite", @@ -16620,7 +17680,7 @@ const PageList = { } } }; -const PageRange = { +var PageRange = { serializedName: "PageRange", xmlName: "PageRange", type: { @@ -16646,7 +17706,7 @@ const PageRange = { } } }; -const ClearRange = { +var ClearRange = { serializedName: "ClearRange", xmlName: "ClearRange", type: { @@ -16672,7 +17732,7 @@ const ClearRange = { } } }; -const QueryRequest = { +var QueryRequest = { serializedName: "QueryRequest", xmlName: "QueryRequest", type: { @@ -16714,7 +17774,7 @@ const QueryRequest = { } } }; -const QuerySerialization = { +var QuerySerialization = { serializedName: "QuerySerialization", type: { name: "Composite", @@ -16731,7 +17791,7 @@ const QuerySerialization = { } } }; -const QueryFormat = { +var QueryFormat = { serializedName: "QueryFormat", type: { name: "Composite", @@ -16772,7 +17832,7 @@ const QueryFormat = { } } }; -const DelimitedTextConfiguration = { +var DelimitedTextConfiguration = { serializedName: "DelimitedTextConfiguration", xmlName: "DelimitedTextConfiguration", type: { @@ -16822,7 +17882,7 @@ const DelimitedTextConfiguration = { } } }; -const JsonTextConfiguration = { +var JsonTextConfiguration = { serializedName: "JsonTextConfiguration", xmlName: "JsonTextConfiguration", type: { @@ -16840,7 +17900,7 @@ const JsonTextConfiguration = { } } }; -const ArrowConfiguration = { +var ArrowConfiguration = { 
serializedName: "ArrowConfiguration", xmlName: "ArrowConfiguration", type: { @@ -16866,7 +17926,7 @@ const ArrowConfiguration = { } } }; -const ArrowField = { +var ArrowField = { serializedName: "ArrowField", xmlName: "Field", type: { @@ -16905,7 +17965,7 @@ const ArrowField = { } } }; -const ServiceSetPropertiesHeaders = { +var ServiceSetPropertiesHeaders = { serializedName: "Service_setPropertiesHeaders", type: { name: "Composite", @@ -16942,7 +18002,7 @@ const ServiceSetPropertiesHeaders = { } } }; -const ServiceSetPropertiesExceptionHeaders = { +var ServiceSetPropertiesExceptionHeaders = { serializedName: "Service_setPropertiesExceptionHeaders", type: { name: "Composite", @@ -16958,7 +18018,7 @@ const ServiceSetPropertiesExceptionHeaders = { } } }; -const ServiceGetPropertiesHeaders = { +var ServiceGetPropertiesHeaders = { serializedName: "Service_getPropertiesHeaders", type: { name: "Composite", @@ -16995,7 +18055,7 @@ const ServiceGetPropertiesHeaders = { } } }; -const ServiceGetPropertiesExceptionHeaders = { +var ServiceGetPropertiesExceptionHeaders = { serializedName: "Service_getPropertiesExceptionHeaders", type: { name: "Composite", @@ -17011,7 +18071,7 @@ const ServiceGetPropertiesExceptionHeaders = { } } }; -const ServiceGetStatisticsHeaders = { +var ServiceGetStatisticsHeaders = { serializedName: "Service_getStatisticsHeaders", type: { name: "Composite", @@ -17055,7 +18115,7 @@ const ServiceGetStatisticsHeaders = { } } }; -const ServiceGetStatisticsExceptionHeaders = { +var ServiceGetStatisticsExceptionHeaders = { serializedName: "Service_getStatisticsExceptionHeaders", type: { name: "Composite", @@ -17071,7 +18131,7 @@ const ServiceGetStatisticsExceptionHeaders = { } } }; -const ServiceListContainersSegmentHeaders = { +var ServiceListContainersSegmentHeaders = { serializedName: "Service_listContainersSegmentHeaders", type: { name: "Composite", @@ -17108,7 +18168,7 @@ const ServiceListContainersSegmentHeaders = { } } }; -const ServiceListContainersSegmentExceptionHeaders = { +var ServiceListContainersSegmentExceptionHeaders = { serializedName: "Service_listContainersSegmentExceptionHeaders", type: { name: "Composite", @@ -17124,7 +18184,7 @@ const ServiceListContainersSegmentExceptionHeaders = { } } }; -const ServiceGetUserDelegationKeyHeaders = { +var ServiceGetUserDelegationKeyHeaders = { serializedName: "Service_getUserDelegationKeyHeaders", type: { name: "Composite", @@ -17168,7 +18228,7 @@ const ServiceGetUserDelegationKeyHeaders = { } } }; -const ServiceGetUserDelegationKeyExceptionHeaders = { +var ServiceGetUserDelegationKeyExceptionHeaders = { serializedName: "Service_getUserDelegationKeyExceptionHeaders", type: { name: "Composite", @@ -17184,7 +18244,7 @@ const ServiceGetUserDelegationKeyExceptionHeaders = { } } }; -const ServiceGetAccountInfoHeaders = { +var ServiceGetAccountInfoHeaders = { serializedName: "Service_getAccountInfoHeaders", type: { name: "Composite", @@ -17263,7 +18323,7 @@ const ServiceGetAccountInfoHeaders = { } } }; -const ServiceGetAccountInfoExceptionHeaders = { +var ServiceGetAccountInfoExceptionHeaders = { serializedName: "Service_getAccountInfoExceptionHeaders", type: { name: "Composite", @@ -17279,7 +18339,7 @@ const ServiceGetAccountInfoExceptionHeaders = { } } }; -const ServiceSubmitBatchHeaders = { +var ServiceSubmitBatchHeaders = { serializedName: "Service_submitBatchHeaders", type: { name: "Composite", @@ -17323,7 +18383,7 @@ const ServiceSubmitBatchHeaders = { } } }; -const ServiceSubmitBatchExceptionHeaders = { +var 
ServiceSubmitBatchExceptionHeaders = { serializedName: "Service_submitBatchExceptionHeaders", type: { name: "Composite", @@ -17339,7 +18399,7 @@ const ServiceSubmitBatchExceptionHeaders = { } } }; -const ServiceFilterBlobsHeaders = { +var ServiceFilterBlobsHeaders = { serializedName: "Service_filterBlobsHeaders", type: { name: "Composite", @@ -17383,7 +18443,7 @@ const ServiceFilterBlobsHeaders = { } } }; -const ServiceFilterBlobsExceptionHeaders = { +var ServiceFilterBlobsExceptionHeaders = { serializedName: "Service_filterBlobsExceptionHeaders", type: { name: "Composite", @@ -17399,7 +18459,7 @@ const ServiceFilterBlobsExceptionHeaders = { } } }; -const ContainerCreateHeaders = { +var ContainerCreateHeaders = { serializedName: "Container_createHeaders", type: { name: "Composite", @@ -17457,7 +18517,7 @@ const ContainerCreateHeaders = { } } }; -const ContainerCreateExceptionHeaders = { +var ContainerCreateExceptionHeaders = { serializedName: "Container_createExceptionHeaders", type: { name: "Composite", @@ -17473,7 +18533,7 @@ const ContainerCreateExceptionHeaders = { } } }; -const ContainerGetPropertiesHeaders = { +var ContainerGetPropertiesHeaders = { serializedName: "Container_getPropertiesHeaders", type: { name: "Composite", @@ -17606,7 +18666,7 @@ const ContainerGetPropertiesHeaders = { } } }; -const ContainerGetPropertiesExceptionHeaders = { +var ContainerGetPropertiesExceptionHeaders = { serializedName: "Container_getPropertiesExceptionHeaders", type: { name: "Composite", @@ -17622,7 +18682,7 @@ const ContainerGetPropertiesExceptionHeaders = { } } }; -const ContainerDeleteHeaders = { +var ContainerDeleteHeaders = { serializedName: "Container_deleteHeaders", type: { name: "Composite", @@ -17666,7 +18726,7 @@ const ContainerDeleteHeaders = { } } }; -const ContainerDeleteExceptionHeaders = { +var ContainerDeleteExceptionHeaders = { serializedName: "Container_deleteExceptionHeaders", type: { name: "Composite", @@ -17682,7 +18742,7 @@ const ContainerDeleteExceptionHeaders = { } } }; -const ContainerSetMetadataHeaders = { +var ContainerSetMetadataHeaders = { serializedName: "Container_setMetadataHeaders", type: { name: "Composite", @@ -17740,7 +18800,7 @@ const ContainerSetMetadataHeaders = { } } }; -const ContainerSetMetadataExceptionHeaders = { +var ContainerSetMetadataExceptionHeaders = { serializedName: "Container_setMetadataExceptionHeaders", type: { name: "Composite", @@ -17756,7 +18816,7 @@ const ContainerSetMetadataExceptionHeaders = { } } }; -const ContainerGetAccessPolicyHeaders = { +var ContainerGetAccessPolicyHeaders = { serializedName: "Container_getAccessPolicyHeaders", type: { name: "Composite", @@ -17822,7 +18882,7 @@ const ContainerGetAccessPolicyHeaders = { } } }; -const ContainerGetAccessPolicyExceptionHeaders = { +var ContainerGetAccessPolicyExceptionHeaders = { serializedName: "Container_getAccessPolicyExceptionHeaders", type: { name: "Composite", @@ -17838,7 +18898,7 @@ const ContainerGetAccessPolicyExceptionHeaders = { } } }; -const ContainerSetAccessPolicyHeaders = { +var ContainerSetAccessPolicyHeaders = { serializedName: "Container_setAccessPolicyHeaders", type: { name: "Composite", @@ -17896,7 +18956,7 @@ const ContainerSetAccessPolicyHeaders = { } } }; -const ContainerSetAccessPolicyExceptionHeaders = { +var ContainerSetAccessPolicyExceptionHeaders = { serializedName: "Container_setAccessPolicyExceptionHeaders", type: { name: "Composite", @@ -17912,7 +18972,7 @@ const ContainerSetAccessPolicyExceptionHeaders = { } } }; -const ContainerRestoreHeaders = { +var 
ContainerRestoreHeaders = { serializedName: "Container_restoreHeaders", type: { name: "Composite", @@ -17956,7 +19016,7 @@ const ContainerRestoreHeaders = { } } }; -const ContainerRestoreExceptionHeaders = { +var ContainerRestoreExceptionHeaders = { serializedName: "Container_restoreExceptionHeaders", type: { name: "Composite", @@ -17972,7 +19032,7 @@ const ContainerRestoreExceptionHeaders = { } } }; -const ContainerRenameHeaders = { +var ContainerRenameHeaders = { serializedName: "Container_renameHeaders", type: { name: "Composite", @@ -18016,7 +19076,7 @@ const ContainerRenameHeaders = { } } }; -const ContainerRenameExceptionHeaders = { +var ContainerRenameExceptionHeaders = { serializedName: "Container_renameExceptionHeaders", type: { name: "Composite", @@ -18032,7 +19092,7 @@ const ContainerRenameExceptionHeaders = { } } }; -const ContainerSubmitBatchHeaders = { +var ContainerSubmitBatchHeaders = { serializedName: "Container_submitBatchHeaders", type: { name: "Composite", @@ -18062,7 +19122,7 @@ const ContainerSubmitBatchHeaders = { } } }; -const ContainerSubmitBatchExceptionHeaders = { +var ContainerSubmitBatchExceptionHeaders = { serializedName: "Container_submitBatchExceptionHeaders", type: { name: "Composite", @@ -18078,7 +19138,7 @@ const ContainerSubmitBatchExceptionHeaders = { } } }; -const ContainerAcquireLeaseHeaders = { +var ContainerAcquireLeaseHeaders = { serializedName: "Container_acquireLeaseHeaders", type: { name: "Composite", @@ -18136,7 +19196,7 @@ const ContainerAcquireLeaseHeaders = { } } }; -const ContainerAcquireLeaseExceptionHeaders = { +var ContainerAcquireLeaseExceptionHeaders = { serializedName: "Container_acquireLeaseExceptionHeaders", type: { name: "Composite", @@ -18152,7 +19212,7 @@ const ContainerAcquireLeaseExceptionHeaders = { } } }; -const ContainerReleaseLeaseHeaders = { +var ContainerReleaseLeaseHeaders = { serializedName: "Container_releaseLeaseHeaders", type: { name: "Composite", @@ -18203,7 +19263,7 @@ const ContainerReleaseLeaseHeaders = { } } }; -const ContainerReleaseLeaseExceptionHeaders = { +var ContainerReleaseLeaseExceptionHeaders = { serializedName: "Container_releaseLeaseExceptionHeaders", type: { name: "Composite", @@ -18219,7 +19279,7 @@ const ContainerReleaseLeaseExceptionHeaders = { } } }; -const ContainerRenewLeaseHeaders = { +var ContainerRenewLeaseHeaders = { serializedName: "Container_renewLeaseHeaders", type: { name: "Composite", @@ -18277,7 +19337,7 @@ const ContainerRenewLeaseHeaders = { } } }; -const ContainerRenewLeaseExceptionHeaders = { +var ContainerRenewLeaseExceptionHeaders = { serializedName: "Container_renewLeaseExceptionHeaders", type: { name: "Composite", @@ -18293,7 +19353,7 @@ const ContainerRenewLeaseExceptionHeaders = { } } }; -const ContainerBreakLeaseHeaders = { +var ContainerBreakLeaseHeaders = { serializedName: "Container_breakLeaseHeaders", type: { name: "Composite", @@ -18351,7 +19411,7 @@ const ContainerBreakLeaseHeaders = { } } }; -const ContainerBreakLeaseExceptionHeaders = { +var ContainerBreakLeaseExceptionHeaders = { serializedName: "Container_breakLeaseExceptionHeaders", type: { name: "Composite", @@ -18367,7 +19427,7 @@ const ContainerBreakLeaseExceptionHeaders = { } } }; -const ContainerChangeLeaseHeaders = { +var ContainerChangeLeaseHeaders = { serializedName: "Container_changeLeaseHeaders", type: { name: "Composite", @@ -18425,7 +19485,7 @@ const ContainerChangeLeaseHeaders = { } } }; -const ContainerChangeLeaseExceptionHeaders = { +var ContainerChangeLeaseExceptionHeaders = { serializedName: 
"Container_changeLeaseExceptionHeaders", type: { name: "Composite", @@ -18441,7 +19501,7 @@ const ContainerChangeLeaseExceptionHeaders = { } } }; -const ContainerListBlobFlatSegmentHeaders = { +var ContainerListBlobFlatSegmentHeaders = { serializedName: "Container_listBlobFlatSegmentHeaders", type: { name: "Composite", @@ -18492,7 +19552,7 @@ const ContainerListBlobFlatSegmentHeaders = { } } }; -const ContainerListBlobFlatSegmentExceptionHeaders = { +var ContainerListBlobFlatSegmentExceptionHeaders = { serializedName: "Container_listBlobFlatSegmentExceptionHeaders", type: { name: "Composite", @@ -18508,7 +19568,7 @@ const ContainerListBlobFlatSegmentExceptionHeaders = { } } }; -const ContainerListBlobHierarchySegmentHeaders = { +var ContainerListBlobHierarchySegmentHeaders = { serializedName: "Container_listBlobHierarchySegmentHeaders", type: { name: "Composite", @@ -18559,7 +19619,7 @@ const ContainerListBlobHierarchySegmentHeaders = { } } }; -const ContainerListBlobHierarchySegmentExceptionHeaders = { +var ContainerListBlobHierarchySegmentExceptionHeaders = { serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", type: { name: "Composite", @@ -18575,7 +19635,7 @@ const ContainerListBlobHierarchySegmentExceptionHeaders = { } } }; -const ContainerGetAccountInfoHeaders = { +var ContainerGetAccountInfoHeaders = { serializedName: "Container_getAccountInfoHeaders", type: { name: "Composite", @@ -18640,7 +19700,7 @@ const ContainerGetAccountInfoHeaders = { } } }; -const ContainerGetAccountInfoExceptionHeaders = { +var ContainerGetAccountInfoExceptionHeaders = { serializedName: "Container_getAccountInfoExceptionHeaders", type: { name: "Composite", @@ -18656,7 +19716,7 @@ const ContainerGetAccountInfoExceptionHeaders = { } } }; -const DirectoryCreateHeaders = { +var DirectoryCreateHeaders = { serializedName: "Directory_createHeaders", type: { name: "Composite", @@ -18714,7 +19774,7 @@ const DirectoryCreateHeaders = { } } }; -const DirectoryCreateExceptionHeaders = { +var DirectoryCreateExceptionHeaders = { serializedName: "Directory_createExceptionHeaders", type: { name: "Composite", @@ -18744,7 +19804,7 @@ const DirectoryCreateExceptionHeaders = { } } }; -const DirectoryRenameHeaders = { +var DirectoryRenameHeaders = { serializedName: "Directory_renameHeaders", type: { name: "Composite", @@ -18809,7 +19869,7 @@ const DirectoryRenameHeaders = { } } }; -const DirectoryRenameExceptionHeaders = { +var DirectoryRenameExceptionHeaders = { serializedName: "Directory_renameExceptionHeaders", type: { name: "Composite", @@ -18839,7 +19899,7 @@ const DirectoryRenameExceptionHeaders = { } } }; -const DirectoryDeleteHeaders = { +var DirectoryDeleteHeaders = { serializedName: "Directory_deleteHeaders", type: { name: "Composite", @@ -18883,7 +19943,7 @@ const DirectoryDeleteHeaders = { } } }; -const DirectoryDeleteExceptionHeaders = { +var DirectoryDeleteExceptionHeaders = { serializedName: "Directory_deleteExceptionHeaders", type: { name: "Composite", @@ -18913,7 +19973,7 @@ const DirectoryDeleteExceptionHeaders = { } } }; -const DirectorySetAccessControlHeaders = { +var DirectorySetAccessControlHeaders = { serializedName: "Directory_setAccessControlHeaders", type: { name: "Composite", @@ -18957,7 +20017,7 @@ const DirectorySetAccessControlHeaders = { } } }; -const DirectorySetAccessControlExceptionHeaders = { +var DirectorySetAccessControlExceptionHeaders = { serializedName: "Directory_setAccessControlExceptionHeaders", type: { name: "Composite", @@ -18987,7 +20047,7 @@ const 
DirectorySetAccessControlExceptionHeaders = { } } }; -const DirectoryGetAccessControlHeaders = { +var DirectoryGetAccessControlHeaders = { serializedName: "Directory_getAccessControlHeaders", type: { name: "Composite", @@ -19059,7 +20119,7 @@ const DirectoryGetAccessControlHeaders = { } } }; -const DirectoryGetAccessControlExceptionHeaders = { +var DirectoryGetAccessControlExceptionHeaders = { serializedName: "Directory_getAccessControlExceptionHeaders", type: { name: "Composite", @@ -19089,7 +20149,7 @@ const DirectoryGetAccessControlExceptionHeaders = { } } }; -const BlobDownloadHeaders = { +var BlobDownloadHeaders = { serializedName: "Blob_downloadHeaders", type: { name: "Composite", @@ -19400,7 +20460,7 @@ const BlobDownloadHeaders = { } } }; -const BlobDownloadExceptionHeaders = { +var BlobDownloadExceptionHeaders = { serializedName: "Blob_downloadExceptionHeaders", type: { name: "Composite", @@ -19416,7 +20476,7 @@ const BlobDownloadExceptionHeaders = { } } }; -const BlobGetPropertiesHeaders = { +var BlobGetPropertiesHeaders = { serializedName: "Blob_getPropertiesHeaders", type: { name: "Composite", @@ -19770,7 +20830,7 @@ const BlobGetPropertiesHeaders = { } } }; -const BlobGetPropertiesExceptionHeaders = { +var BlobGetPropertiesExceptionHeaders = { serializedName: "Blob_getPropertiesExceptionHeaders", type: { name: "Composite", @@ -19786,7 +20846,7 @@ const BlobGetPropertiesExceptionHeaders = { } } }; -const BlobDeleteHeaders = { +var BlobDeleteHeaders = { serializedName: "Blob_deleteHeaders", type: { name: "Composite", @@ -19830,7 +20890,7 @@ const BlobDeleteHeaders = { } } }; -const BlobDeleteExceptionHeaders = { +var BlobDeleteExceptionHeaders = { serializedName: "Blob_deleteExceptionHeaders", type: { name: "Composite", @@ -19846,7 +20906,7 @@ const BlobDeleteExceptionHeaders = { } } }; -const BlobSetAccessControlHeaders = { +var BlobSetAccessControlHeaders = { serializedName: "Blob_setAccessControlHeaders", type: { name: "Composite", @@ -19890,7 +20950,7 @@ const BlobSetAccessControlHeaders = { } } }; -const BlobSetAccessControlExceptionHeaders = { +var BlobSetAccessControlExceptionHeaders = { serializedName: "Blob_setAccessControlExceptionHeaders", type: { name: "Composite", @@ -19920,7 +20980,7 @@ const BlobSetAccessControlExceptionHeaders = { } } }; -const BlobGetAccessControlHeaders = { +var BlobGetAccessControlHeaders = { serializedName: "Blob_getAccessControlHeaders", type: { name: "Composite", @@ -19992,7 +21052,7 @@ const BlobGetAccessControlHeaders = { } } }; -const BlobGetAccessControlExceptionHeaders = { +var BlobGetAccessControlExceptionHeaders = { serializedName: "Blob_getAccessControlExceptionHeaders", type: { name: "Composite", @@ -20022,7 +21082,7 @@ const BlobGetAccessControlExceptionHeaders = { } } }; -const BlobRenameHeaders = { +var BlobRenameHeaders = { serializedName: "Blob_renameHeaders", type: { name: "Composite", @@ -20080,7 +21140,7 @@ const BlobRenameHeaders = { } } }; -const BlobRenameExceptionHeaders = { +var BlobRenameExceptionHeaders = { serializedName: "Blob_renameExceptionHeaders", type: { name: "Composite", @@ -20110,7 +21170,7 @@ const BlobRenameExceptionHeaders = { } } }; -const BlobUndeleteHeaders = { +var BlobUndeleteHeaders = { serializedName: "Blob_undeleteHeaders", type: { name: "Composite", @@ -20154,7 +21214,7 @@ const BlobUndeleteHeaders = { } } }; -const BlobUndeleteExceptionHeaders = { +var BlobUndeleteExceptionHeaders = { serializedName: "Blob_undeleteExceptionHeaders", type: { name: "Composite", @@ -20170,7 +21230,7 @@ const 
BlobUndeleteExceptionHeaders = { } } }; -const BlobSetExpiryHeaders = { +var BlobSetExpiryHeaders = { serializedName: "Blob_setExpiryHeaders", type: { name: "Composite", @@ -20221,7 +21281,7 @@ const BlobSetExpiryHeaders = { } } }; -const BlobSetExpiryExceptionHeaders = { +var BlobSetExpiryExceptionHeaders = { serializedName: "Blob_setExpiryExceptionHeaders", type: { name: "Composite", @@ -20237,7 +21297,7 @@ const BlobSetExpiryExceptionHeaders = { } } }; -const BlobSetHttpHeadersHeaders = { +var BlobSetHttpHeadersHeaders = { serializedName: "Blob_setHttpHeadersHeaders", type: { name: "Composite", @@ -20302,7 +21362,7 @@ const BlobSetHttpHeadersHeaders = { } } }; -const BlobSetHttpHeadersExceptionHeaders = { +var BlobSetHttpHeadersExceptionHeaders = { serializedName: "Blob_setHttpHeadersExceptionHeaders", type: { name: "Composite", @@ -20318,7 +21378,7 @@ const BlobSetHttpHeadersExceptionHeaders = { } } }; -const BlobSetMetadataHeaders = { +var BlobSetMetadataHeaders = { serializedName: "Blob_setMetadataHeaders", type: { name: "Composite", @@ -20404,7 +21464,7 @@ const BlobSetMetadataHeaders = { } } }; -const BlobSetMetadataExceptionHeaders = { +var BlobSetMetadataExceptionHeaders = { serializedName: "Blob_setMetadataExceptionHeaders", type: { name: "Composite", @@ -20420,7 +21480,7 @@ const BlobSetMetadataExceptionHeaders = { } } }; -const BlobAcquireLeaseHeaders = { +var BlobAcquireLeaseHeaders = { serializedName: "Blob_acquireLeaseHeaders", type: { name: "Composite", @@ -20478,7 +21538,7 @@ const BlobAcquireLeaseHeaders = { } } }; -const BlobAcquireLeaseExceptionHeaders = { +var BlobAcquireLeaseExceptionHeaders = { serializedName: "Blob_acquireLeaseExceptionHeaders", type: { name: "Composite", @@ -20494,7 +21554,7 @@ const BlobAcquireLeaseExceptionHeaders = { } } }; -const BlobReleaseLeaseHeaders = { +var BlobReleaseLeaseHeaders = { serializedName: "Blob_releaseLeaseHeaders", type: { name: "Composite", @@ -20545,7 +21605,7 @@ const BlobReleaseLeaseHeaders = { } } }; -const BlobReleaseLeaseExceptionHeaders = { +var BlobReleaseLeaseExceptionHeaders = { serializedName: "Blob_releaseLeaseExceptionHeaders", type: { name: "Composite", @@ -20561,7 +21621,7 @@ const BlobReleaseLeaseExceptionHeaders = { } } }; -const BlobRenewLeaseHeaders = { +var BlobRenewLeaseHeaders = { serializedName: "Blob_renewLeaseHeaders", type: { name: "Composite", @@ -20619,7 +21679,7 @@ const BlobRenewLeaseHeaders = { } } }; -const BlobRenewLeaseExceptionHeaders = { +var BlobRenewLeaseExceptionHeaders = { serializedName: "Blob_renewLeaseExceptionHeaders", type: { name: "Composite", @@ -20635,7 +21695,7 @@ const BlobRenewLeaseExceptionHeaders = { } } }; -const BlobChangeLeaseHeaders = { +var BlobChangeLeaseHeaders = { serializedName: "Blob_changeLeaseHeaders", type: { name: "Composite", @@ -20693,7 +21753,7 @@ const BlobChangeLeaseHeaders = { } } }; -const BlobChangeLeaseExceptionHeaders = { +var BlobChangeLeaseExceptionHeaders = { serializedName: "Blob_changeLeaseExceptionHeaders", type: { name: "Composite", @@ -20709,7 +21769,7 @@ const BlobChangeLeaseExceptionHeaders = { } } }; -const BlobBreakLeaseHeaders = { +var BlobBreakLeaseHeaders = { serializedName: "Blob_breakLeaseHeaders", type: { name: "Composite", @@ -20767,7 +21827,7 @@ const BlobBreakLeaseHeaders = { } } }; -const BlobBreakLeaseExceptionHeaders = { +var BlobBreakLeaseExceptionHeaders = { serializedName: "Blob_breakLeaseExceptionHeaders", type: { name: "Composite", @@ -20783,7 +21843,7 @@ const BlobBreakLeaseExceptionHeaders = { } } }; -const 
BlobCreateSnapshotHeaders = { +var BlobCreateSnapshotHeaders = { serializedName: "Blob_createSnapshotHeaders", type: { name: "Composite", @@ -20862,7 +21922,7 @@ const BlobCreateSnapshotHeaders = { } } }; -const BlobCreateSnapshotExceptionHeaders = { +var BlobCreateSnapshotExceptionHeaders = { serializedName: "Blob_createSnapshotExceptionHeaders", type: { name: "Composite", @@ -20878,7 +21938,7 @@ const BlobCreateSnapshotExceptionHeaders = { } } }; -const BlobStartCopyFromURLHeaders = { +var BlobStartCopyFromURLHeaders = { serializedName: "Blob_startCopyFromURLHeaders", type: { name: "Composite", @@ -20958,7 +22018,7 @@ const BlobStartCopyFromURLHeaders = { } } }; -const BlobStartCopyFromURLExceptionHeaders = { +var BlobStartCopyFromURLExceptionHeaders = { serializedName: "Blob_startCopyFromURLExceptionHeaders", type: { name: "Composite", @@ -20974,7 +22034,7 @@ const BlobStartCopyFromURLExceptionHeaders = { } } }; -const BlobCopyFromURLHeaders = { +var BlobCopyFromURLHeaders = { serializedName: "Blob_copyFromURLHeaders", type: { name: "Composite", @@ -21068,7 +22128,7 @@ const BlobCopyFromURLHeaders = { } } }; -const BlobCopyFromURLExceptionHeaders = { +var BlobCopyFromURLExceptionHeaders = { serializedName: "Blob_copyFromURLExceptionHeaders", type: { name: "Composite", @@ -21084,7 +22144,7 @@ const BlobCopyFromURLExceptionHeaders = { } } }; -const BlobAbortCopyFromURLHeaders = { +var BlobAbortCopyFromURLHeaders = { serializedName: "Blob_abortCopyFromURLHeaders", type: { name: "Composite", @@ -21128,7 +22188,7 @@ const BlobAbortCopyFromURLHeaders = { } } }; -const BlobAbortCopyFromURLExceptionHeaders = { +var BlobAbortCopyFromURLExceptionHeaders = { serializedName: "Blob_abortCopyFromURLExceptionHeaders", type: { name: "Composite", @@ -21144,7 +22204,7 @@ const BlobAbortCopyFromURLExceptionHeaders = { } } }; -const BlobSetTierHeaders = { +var BlobSetTierHeaders = { serializedName: "Blob_setTierHeaders", type: { name: "Composite", @@ -21181,7 +22241,7 @@ const BlobSetTierHeaders = { } } }; -const BlobSetTierExceptionHeaders = { +var BlobSetTierExceptionHeaders = { serializedName: "Blob_setTierExceptionHeaders", type: { name: "Composite", @@ -21197,7 +22257,7 @@ const BlobSetTierExceptionHeaders = { } } }; -const BlobGetAccountInfoHeaders = { +var BlobGetAccountInfoHeaders = { serializedName: "Blob_getAccountInfoHeaders", type: { name: "Composite", @@ -21262,7 +22322,7 @@ const BlobGetAccountInfoHeaders = { } } }; -const BlobGetAccountInfoExceptionHeaders = { +var BlobGetAccountInfoExceptionHeaders = { serializedName: "Blob_getAccountInfoExceptionHeaders", type: { name: "Composite", @@ -21278,7 +22338,7 @@ const BlobGetAccountInfoExceptionHeaders = { } } }; -const BlobQueryHeaders = { +var BlobQueryHeaders = { serializedName: "Blob_queryHeaders", type: { name: "Composite", @@ -21537,7 +22597,7 @@ const BlobQueryHeaders = { } } }; -const BlobQueryExceptionHeaders = { +var BlobQueryExceptionHeaders = { serializedName: "Blob_queryExceptionHeaders", type: { name: "Composite", @@ -21553,7 +22613,7 @@ const BlobQueryExceptionHeaders = { } } }; -const BlobGetTagsHeaders = { +var BlobGetTagsHeaders = { serializedName: "Blob_getTagsHeaders", type: { name: "Composite", @@ -21597,7 +22657,7 @@ const BlobGetTagsHeaders = { } } }; -const BlobGetTagsExceptionHeaders = { +var BlobGetTagsExceptionHeaders = { serializedName: "Blob_getTagsExceptionHeaders", type: { name: "Composite", @@ -21613,7 +22673,7 @@ const BlobGetTagsExceptionHeaders = { } } }; -const BlobSetTagsHeaders = { +var BlobSetTagsHeaders = { 
serializedName: "Blob_setTagsHeaders", type: { name: "Composite", @@ -21657,7 +22717,7 @@ const BlobSetTagsHeaders = { } } }; -const BlobSetTagsExceptionHeaders = { +var BlobSetTagsExceptionHeaders = { serializedName: "Blob_setTagsExceptionHeaders", type: { name: "Composite", @@ -21673,7 +22733,7 @@ const BlobSetTagsExceptionHeaders = { } } }; -const PageBlobCreateHeaders = { +var PageBlobCreateHeaders = { serializedName: "PageBlob_createHeaders", type: { name: "Composite", @@ -21766,7 +22826,7 @@ const PageBlobCreateHeaders = { } } }; -const PageBlobCreateExceptionHeaders = { +var PageBlobCreateExceptionHeaders = { serializedName: "PageBlob_createExceptionHeaders", type: { name: "Composite", @@ -21782,7 +22842,7 @@ const PageBlobCreateExceptionHeaders = { } } }; -const PageBlobUploadPagesHeaders = { +var PageBlobUploadPagesHeaders = { serializedName: "PageBlob_uploadPagesHeaders", type: { name: "Composite", @@ -21882,7 +22942,7 @@ const PageBlobUploadPagesHeaders = { } } }; -const PageBlobUploadPagesExceptionHeaders = { +var PageBlobUploadPagesExceptionHeaders = { serializedName: "PageBlob_uploadPagesExceptionHeaders", type: { name: "Composite", @@ -21898,7 +22958,7 @@ const PageBlobUploadPagesExceptionHeaders = { } } }; -const PageBlobClearPagesHeaders = { +var PageBlobClearPagesHeaders = { serializedName: "PageBlob_clearPagesHeaders", type: { name: "Composite", @@ -21977,7 +23037,7 @@ const PageBlobClearPagesHeaders = { } } }; -const PageBlobClearPagesExceptionHeaders = { +var PageBlobClearPagesExceptionHeaders = { serializedName: "PageBlob_clearPagesExceptionHeaders", type: { name: "Composite", @@ -21993,7 +23053,7 @@ const PageBlobClearPagesExceptionHeaders = { } } }; -const PageBlobUploadPagesFromURLHeaders = { +var PageBlobUploadPagesFromURLHeaders = { serializedName: "PageBlob_uploadPagesFromURLHeaders", type: { name: "Composite", @@ -22086,7 +23146,7 @@ const PageBlobUploadPagesFromURLHeaders = { } } }; -const PageBlobUploadPagesFromURLExceptionHeaders = { +var PageBlobUploadPagesFromURLExceptionHeaders = { serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", type: { name: "Composite", @@ -22102,7 +23162,7 @@ const PageBlobUploadPagesFromURLExceptionHeaders = { } } }; -const PageBlobGetPageRangesHeaders = { +var PageBlobGetPageRangesHeaders = { serializedName: "PageBlob_getPageRangesHeaders", type: { name: "Composite", @@ -22167,7 +23227,7 @@ const PageBlobGetPageRangesHeaders = { } } }; -const PageBlobGetPageRangesExceptionHeaders = { +var PageBlobGetPageRangesExceptionHeaders = { serializedName: "PageBlob_getPageRangesExceptionHeaders", type: { name: "Composite", @@ -22183,7 +23243,7 @@ const PageBlobGetPageRangesExceptionHeaders = { } } }; -const PageBlobGetPageRangesDiffHeaders = { +var PageBlobGetPageRangesDiffHeaders = { serializedName: "PageBlob_getPageRangesDiffHeaders", type: { name: "Composite", @@ -22248,7 +23308,7 @@ const PageBlobGetPageRangesDiffHeaders = { } } }; -const PageBlobGetPageRangesDiffExceptionHeaders = { +var PageBlobGetPageRangesDiffExceptionHeaders = { serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", type: { name: "Composite", @@ -22264,7 +23324,7 @@ const PageBlobGetPageRangesDiffExceptionHeaders = { } } }; -const PageBlobResizeHeaders = { +var PageBlobResizeHeaders = { serializedName: "PageBlob_resizeHeaders", type: { name: "Composite", @@ -22329,7 +23389,7 @@ const PageBlobResizeHeaders = { } } }; -const PageBlobResizeExceptionHeaders = { +var PageBlobResizeExceptionHeaders = { serializedName: "PageBlob_resizeExceptionHeaders", 
type: { name: "Composite", @@ -22345,7 +23405,7 @@ const PageBlobResizeExceptionHeaders = { } } }; -const PageBlobUpdateSequenceNumberHeaders = { +var PageBlobUpdateSequenceNumberHeaders = { serializedName: "PageBlob_updateSequenceNumberHeaders", type: { name: "Composite", @@ -22410,7 +23470,7 @@ const PageBlobUpdateSequenceNumberHeaders = { } } }; -const PageBlobUpdateSequenceNumberExceptionHeaders = { +var PageBlobUpdateSequenceNumberExceptionHeaders = { serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", type: { name: "Composite", @@ -22426,7 +23486,7 @@ const PageBlobUpdateSequenceNumberExceptionHeaders = { } } }; -const PageBlobCopyIncrementalHeaders = { +var PageBlobCopyIncrementalHeaders = { serializedName: "PageBlob_copyIncrementalHeaders", type: { name: "Composite", @@ -22499,7 +23559,7 @@ const PageBlobCopyIncrementalHeaders = { } } }; -const PageBlobCopyIncrementalExceptionHeaders = { +var PageBlobCopyIncrementalExceptionHeaders = { serializedName: "PageBlob_copyIncrementalExceptionHeaders", type: { name: "Composite", @@ -22515,7 +23575,7 @@ const PageBlobCopyIncrementalExceptionHeaders = { } } }; -const AppendBlobCreateHeaders = { +var AppendBlobCreateHeaders = { serializedName: "AppendBlob_createHeaders", type: { name: "Composite", @@ -22608,7 +23668,7 @@ const AppendBlobCreateHeaders = { } } }; -const AppendBlobCreateExceptionHeaders = { +var AppendBlobCreateExceptionHeaders = { serializedName: "AppendBlob_createExceptionHeaders", type: { name: "Composite", @@ -22624,7 +23684,7 @@ const AppendBlobCreateExceptionHeaders = { } } }; -const AppendBlobAppendBlockHeaders = { +var AppendBlobAppendBlockHeaders = { serializedName: "AppendBlob_appendBlockHeaders", type: { name: "Composite", @@ -22731,7 +23791,7 @@ const AppendBlobAppendBlockHeaders = { } } }; -const AppendBlobAppendBlockExceptionHeaders = { +var AppendBlobAppendBlockExceptionHeaders = { serializedName: "AppendBlob_appendBlockExceptionHeaders", type: { name: "Composite", @@ -22747,7 +23807,7 @@ const AppendBlobAppendBlockExceptionHeaders = { } } }; -const AppendBlobAppendBlockFromUrlHeaders = { +var AppendBlobAppendBlockFromUrlHeaders = { serializedName: "AppendBlob_appendBlockFromUrlHeaders", type: { name: "Composite", @@ -22847,7 +23907,7 @@ const AppendBlobAppendBlockFromUrlHeaders = { } } }; -const AppendBlobAppendBlockFromUrlExceptionHeaders = { +var AppendBlobAppendBlockFromUrlExceptionHeaders = { serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", type: { name: "Composite", @@ -22863,7 +23923,7 @@ const AppendBlobAppendBlockFromUrlExceptionHeaders = { } } }; -const AppendBlobSealHeaders = { +var AppendBlobSealHeaders = { serializedName: "AppendBlob_sealHeaders", type: { name: "Composite", @@ -22921,7 +23981,7 @@ const AppendBlobSealHeaders = { } } }; -const AppendBlobSealExceptionHeaders = { +var AppendBlobSealExceptionHeaders = { serializedName: "AppendBlob_sealExceptionHeaders", type: { name: "Composite", @@ -22937,7 +23997,7 @@ const AppendBlobSealExceptionHeaders = { } } }; -const BlockBlobUploadHeaders = { +var BlockBlobUploadHeaders = { serializedName: "BlockBlob_uploadHeaders", type: { name: "Composite", @@ -23030,7 +24090,7 @@ const BlockBlobUploadHeaders = { } } }; -const BlockBlobUploadExceptionHeaders = { +var BlockBlobUploadExceptionHeaders = { serializedName: "BlockBlob_uploadExceptionHeaders", type: { name: "Composite", @@ -23046,7 +24106,7 @@ const BlockBlobUploadExceptionHeaders = { } } }; -const BlockBlobPutBlobFromUrlHeaders = { +var BlockBlobPutBlobFromUrlHeaders = { 
serializedName: "BlockBlob_putBlobFromUrlHeaders", type: { name: "Composite", @@ -23139,7 +24199,7 @@ const BlockBlobPutBlobFromUrlHeaders = { } } }; -const BlockBlobPutBlobFromUrlExceptionHeaders = { +var BlockBlobPutBlobFromUrlExceptionHeaders = { serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", type: { name: "Composite", @@ -23155,7 +24215,7 @@ const BlockBlobPutBlobFromUrlExceptionHeaders = { } } }; -const BlockBlobStageBlockHeaders = { +var BlockBlobStageBlockHeaders = { serializedName: "BlockBlob_stageBlockHeaders", type: { name: "Composite", @@ -23234,7 +24294,7 @@ const BlockBlobStageBlockHeaders = { } } }; -const BlockBlobStageBlockExceptionHeaders = { +var BlockBlobStageBlockExceptionHeaders = { serializedName: "BlockBlob_stageBlockExceptionHeaders", type: { name: "Composite", @@ -23250,7 +24310,7 @@ const BlockBlobStageBlockExceptionHeaders = { } } }; -const BlockBlobStageBlockFromURLHeaders = { +var BlockBlobStageBlockFromURLHeaders = { serializedName: "BlockBlob_stageBlockFromURLHeaders", type: { name: "Composite", @@ -23329,7 +24389,7 @@ const BlockBlobStageBlockFromURLHeaders = { } } }; -const BlockBlobStageBlockFromURLExceptionHeaders = { +var BlockBlobStageBlockFromURLExceptionHeaders = { serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", type: { name: "Composite", @@ -23345,7 +24405,7 @@ const BlockBlobStageBlockFromURLExceptionHeaders = { } } }; -const BlockBlobCommitBlockListHeaders = { +var BlockBlobCommitBlockListHeaders = { serializedName: "BlockBlob_commitBlockListHeaders", type: { name: "Composite", @@ -23445,7 +24505,7 @@ const BlockBlobCommitBlockListHeaders = { } } }; -const BlockBlobCommitBlockListExceptionHeaders = { +var BlockBlobCommitBlockListExceptionHeaders = { serializedName: "BlockBlob_commitBlockListExceptionHeaders", type: { name: "Composite", @@ -23461,7 +24521,7 @@ const BlockBlobCommitBlockListExceptionHeaders = { } } }; -const BlockBlobGetBlockListHeaders = { +var BlockBlobGetBlockListHeaders = { serializedName: "BlockBlob_getBlockListHeaders", type: { name: "Composite", @@ -23533,7 +24593,7 @@ const BlockBlobGetBlockListHeaders = { } } }; -const BlockBlobGetBlockListExceptionHeaders = { +var BlockBlobGetBlockListExceptionHeaders = { serializedName: "BlockBlob_getBlockListExceptionHeaders", type: { name: "Composite", @@ -23749,7 +24809,7 @@ var Mappers = /*#__PURE__*/Object.freeze({ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ -const contentType = { +var contentType = { parameterPath: ["options", "contentType"], mapper: { defaultValue: "application/xml", @@ -23760,11 +24820,11 @@ const contentType = { } } }; -const blobServiceProperties = { +var blobServiceProperties = { parameterPath: "blobServiceProperties", mapper: BlobServiceProperties }; -const accept = { +var accept = { parameterPath: "accept", mapper: { defaultValue: "application/xml", @@ -23775,7 +24835,7 @@ const accept = { } } }; -const url = { +var url = { parameterPath: "url", mapper: { serializedName: "url", @@ -23787,7 +24847,7 @@ const url = { }, skipEncoding: true }; -const restype = { +var restype = { parameterPath: "restype", mapper: { defaultValue: "service", @@ -23798,7 +24858,7 @@ const restype = { } } }; -const comp = { +var comp = { parameterPath: "comp", mapper: { defaultValue: "properties", @@ -23809,7 +24869,7 @@ const comp = { } } }; -const timeoutInSeconds = { +var timeoutInSeconds = { parameterPath: ["options", "timeoutInSeconds"], mapper: { constraints: { @@ -23822,7 +24882,7 @@ const timeoutInSeconds = { } } }; -const version = { +var version = { parameterPath: "version", mapper: { defaultValue: "2020-08-04", @@ -23833,7 +24893,7 @@ const version = { } } }; -const requestId = { +var requestId = { parameterPath: ["options", "requestId"], mapper: { serializedName: "x-ms-client-request-id", @@ -23843,7 +24903,7 @@ const requestId = { } } }; -const accept1 = { +var accept1 = { parameterPath: "accept", mapper: { defaultValue: "application/xml", @@ -23854,7 +24914,7 @@ const accept1 = { } } }; -const comp1 = { +var comp1 = { parameterPath: "comp", mapper: { defaultValue: "stats", @@ -23865,7 +24925,7 @@ const comp1 = { } } }; -const comp2 = { +var comp2 = { parameterPath: "comp", mapper: { defaultValue: "list", @@ -23876,7 +24936,7 @@ const comp2 = { } } }; -const prefix = { +var prefix = { parameterPath: ["options", "prefix"], mapper: { serializedName: "prefix", @@ -23886,7 +24946,7 @@ const prefix = { } } }; -const marker = { +var marker = { parameterPath: ["options", "marker"], mapper: { serializedName: "marker", @@ -23896,7 +24956,7 @@ const marker = { } } }; -const maxPageSize = { +var maxPageSize = { parameterPath: ["options", "maxPageSize"], mapper: { constraints: { @@ -23909,7 +24969,7 @@ const maxPageSize = { } } }; -const include = { +var include = { parameterPath: ["options", "include"], mapper: { serializedName: "include", @@ -23927,11 +24987,11 @@ const include = { }, collectionFormat: coreHttp.QueryCollectionFormat.Csv }; -const keyInfo = { +var keyInfo = { parameterPath: "keyInfo", mapper: KeyInfo }; -const comp3 = { +var comp3 = { parameterPath: "comp", mapper: { defaultValue: "userdelegationkey", @@ -23942,7 +25002,7 @@ const comp3 = { } } }; -const restype1 = { +var restype1 = { parameterPath: "restype", mapper: { defaultValue: "account", @@ -23953,7 +25013,7 @@ const restype1 = { } } }; -const body = { +var body = { parameterPath: "body", mapper: { serializedName: "body", @@ -23964,7 +25024,7 @@ const body = { } } }; -const comp4 = { +var comp4 = { parameterPath: "comp", mapper: { defaultValue: "batch", @@ -23975,7 +25035,7 @@ const comp4 = { } } }; -const contentLength = { +var contentLength = { parameterPath: "contentLength", mapper: { serializedName: "Content-Length", @@ -23986,7 +25046,7 @@ const contentLength = { } } }; -const multipartContentType = { +var multipartContentType = { parameterPath: "multipartContentType", mapper: { serializedName: "Content-Type", @@ -23997,7 +25057,7 @@ const multipartContentType 
= { } } }; -const comp5 = { +var comp5 = { parameterPath: "comp", mapper: { defaultValue: "blobs", @@ -24008,7 +25068,7 @@ const comp5 = { } } }; -const where = { +var where = { parameterPath: ["options", "where"], mapper: { serializedName: "where", @@ -24018,7 +25078,7 @@ const where = { } } }; -const restype2 = { +var restype2 = { parameterPath: "restype", mapper: { defaultValue: "container", @@ -24029,7 +25089,7 @@ const restype2 = { } } }; -const metadata = { +var metadata = { parameterPath: ["options", "metadata"], mapper: { serializedName: "x-ms-meta", @@ -24041,7 +25101,7 @@ const metadata = { headerCollectionPrefix: "x-ms-meta-" } }; -const access = { +var access = { parameterPath: ["options", "access"], mapper: { serializedName: "x-ms-blob-public-access", @@ -24052,7 +25112,7 @@ const access = { } } }; -const defaultEncryptionScope = { +var defaultEncryptionScope = { parameterPath: [ "options", "containerEncryptionScope", @@ -24066,7 +25126,7 @@ const defaultEncryptionScope = { } } }; -const preventEncryptionScopeOverride = { +var preventEncryptionScopeOverride = { parameterPath: [ "options", "containerEncryptionScope", @@ -24080,7 +25140,7 @@ const preventEncryptionScopeOverride = { } } }; -const leaseId = { +var leaseId = { parameterPath: ["options", "leaseAccessConditions", "leaseId"], mapper: { serializedName: "x-ms-lease-id", @@ -24090,7 +25150,7 @@ const leaseId = { } } }; -const ifModifiedSince = { +var ifModifiedSince = { parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], mapper: { serializedName: "If-Modified-Since", @@ -24100,7 +25160,7 @@ const ifModifiedSince = { } } }; -const ifUnmodifiedSince = { +var ifUnmodifiedSince = { parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], mapper: { serializedName: "If-Unmodified-Since", @@ -24110,7 +25170,7 @@ const ifUnmodifiedSince = { } } }; -const comp6 = { +var comp6 = { parameterPath: "comp", mapper: { defaultValue: "metadata", @@ -24121,7 +25181,7 @@ const comp6 = { } } }; -const comp7 = { +var comp7 = { parameterPath: "comp", mapper: { defaultValue: "acl", @@ -24132,7 +25192,7 @@ const comp7 = { } } }; -const containerAcl = { +var containerAcl = { parameterPath: ["options", "containerAcl"], mapper: { serializedName: "containerAcl", @@ -24150,7 +25210,7 @@ const containerAcl = { } } }; -const comp8 = { +var comp8 = { parameterPath: "comp", mapper: { defaultValue: "undelete", @@ -24161,7 +25221,7 @@ const comp8 = { } } }; -const deletedContainerName = { +var deletedContainerName = { parameterPath: ["options", "deletedContainerName"], mapper: { serializedName: "x-ms-deleted-container-name", @@ -24171,7 +25231,7 @@ const deletedContainerName = { } } }; -const deletedContainerVersion = { +var deletedContainerVersion = { parameterPath: ["options", "deletedContainerVersion"], mapper: { serializedName: "x-ms-deleted-container-version", @@ -24181,7 +25241,7 @@ const deletedContainerVersion = { } } }; -const comp9 = { +var comp9 = { parameterPath: "comp", mapper: { defaultValue: "rename", @@ -24192,7 +25252,7 @@ const comp9 = { } } }; -const sourceContainerName = { +var sourceContainerName = { parameterPath: "sourceContainerName", mapper: { serializedName: "x-ms-source-container-name", @@ -24203,7 +25263,7 @@ const sourceContainerName = { } } }; -const sourceLeaseId = { +var sourceLeaseId = { parameterPath: ["options", "sourceLeaseId"], mapper: { serializedName: "x-ms-source-lease-id", @@ -24213,7 +25273,7 @@ const sourceLeaseId = { } } }; -const comp10 = { +var comp10 = { 
parameterPath: "comp", mapper: { defaultValue: "lease", @@ -24224,7 +25284,7 @@ const comp10 = { } } }; -const action = { +var action = { parameterPath: "action", mapper: { defaultValue: "acquire", @@ -24235,7 +25295,7 @@ const action = { } } }; -const duration = { +var duration = { parameterPath: ["options", "duration"], mapper: { serializedName: "x-ms-lease-duration", @@ -24245,7 +25305,7 @@ const duration = { } } }; -const proposedLeaseId = { +var proposedLeaseId = { parameterPath: ["options", "proposedLeaseId"], mapper: { serializedName: "x-ms-proposed-lease-id", @@ -24255,7 +25315,7 @@ const proposedLeaseId = { } } }; -const action1 = { +var action1 = { parameterPath: "action", mapper: { defaultValue: "release", @@ -24266,7 +25326,7 @@ const action1 = { } } }; -const leaseId1 = { +var leaseId1 = { parameterPath: "leaseId", mapper: { serializedName: "x-ms-lease-id", @@ -24277,7 +25337,7 @@ const leaseId1 = { } } }; -const action2 = { +var action2 = { parameterPath: "action", mapper: { defaultValue: "renew", @@ -24288,7 +25348,7 @@ const action2 = { } } }; -const action3 = { +var action3 = { parameterPath: "action", mapper: { defaultValue: "break", @@ -24299,7 +25359,7 @@ const action3 = { } } }; -const breakPeriod = { +var breakPeriod = { parameterPath: ["options", "breakPeriod"], mapper: { serializedName: "x-ms-lease-break-period", @@ -24309,7 +25369,7 @@ const breakPeriod = { } } }; -const action4 = { +var action4 = { parameterPath: "action", mapper: { defaultValue: "change", @@ -24320,7 +25380,7 @@ const action4 = { } } }; -const proposedLeaseId1 = { +var proposedLeaseId1 = { parameterPath: "proposedLeaseId", mapper: { serializedName: "x-ms-proposed-lease-id", @@ -24331,7 +25391,7 @@ const proposedLeaseId1 = { } } }; -const include1 = { +var include1 = { parameterPath: ["options", "include"], mapper: { serializedName: "include", @@ -24357,7 +25417,7 @@ const include1 = { }, collectionFormat: coreHttp.QueryCollectionFormat.Csv }; -const delimiter = { +var delimiter = { parameterPath: "delimiter", mapper: { serializedName: "delimiter", @@ -24368,7 +25428,7 @@ const delimiter = { } } }; -const directoryProperties = { +var directoryProperties = { parameterPath: ["options", "directoryProperties"], mapper: { serializedName: "x-ms-properties", @@ -24378,7 +25438,7 @@ const directoryProperties = { } } }; -const posixPermissions = { +var posixPermissions = { parameterPath: ["options", "posixPermissions"], mapper: { serializedName: "x-ms-permissions", @@ -24388,7 +25448,7 @@ const posixPermissions = { } } }; -const posixUmask = { +var posixUmask = { parameterPath: ["options", "posixUmask"], mapper: { serializedName: "x-ms-umask", @@ -24398,7 +25458,7 @@ const posixUmask = { } } }; -const cacheControl = { +var cacheControl = { parameterPath: ["options", "directoryHttpHeaders", "cacheControl"], mapper: { serializedName: "x-ms-cache-control", @@ -24408,7 +25468,7 @@ const cacheControl = { } } }; -const contentType1 = { +var contentType1 = { parameterPath: ["options", "directoryHttpHeaders", "contentType"], mapper: { serializedName: "x-ms-content-type", @@ -24418,7 +25478,7 @@ const contentType1 = { } } }; -const contentEncoding = { +var contentEncoding = { parameterPath: ["options", "directoryHttpHeaders", "contentEncoding"], mapper: { serializedName: "x-ms-content-encoding", @@ -24428,7 +25488,7 @@ const contentEncoding = { } } }; -const contentLanguage = { +var contentLanguage = { parameterPath: ["options", "directoryHttpHeaders", "contentLanguage"], mapper: { serializedName: 
"x-ms-content-language", @@ -24438,7 +25498,7 @@ const contentLanguage = { } } }; -const contentDisposition = { +var contentDisposition = { parameterPath: ["options", "directoryHttpHeaders", "contentDisposition"], mapper: { serializedName: "x-ms-content-disposition", @@ -24448,7 +25508,7 @@ const contentDisposition = { } } }; -const ifMatch = { +var ifMatch = { parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], mapper: { serializedName: "If-Match", @@ -24458,7 +25518,7 @@ const ifMatch = { } } }; -const ifNoneMatch = { +var ifNoneMatch = { parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], mapper: { serializedName: "If-None-Match", @@ -24468,7 +25528,7 @@ const ifNoneMatch = { } } }; -const pathRenameMode = { +var pathRenameMode = { parameterPath: ["options", "pathRenameMode"], mapper: { serializedName: "mode", @@ -24479,7 +25539,7 @@ const pathRenameMode = { } } }; -const renameSource = { +var renameSource = { parameterPath: "renameSource", mapper: { serializedName: "x-ms-rename-source", @@ -24490,7 +25550,7 @@ const renameSource = { } } }; -const sourceIfModifiedSince = { +var sourceIfModifiedSince = { parameterPath: [ "options", "sourceModifiedAccessConditions", @@ -24504,7 +25564,7 @@ const sourceIfModifiedSince = { } } }; -const sourceIfUnmodifiedSince = { +var sourceIfUnmodifiedSince = { parameterPath: [ "options", "sourceModifiedAccessConditions", @@ -24518,7 +25578,7 @@ const sourceIfUnmodifiedSince = { } } }; -const sourceIfMatch = { +var sourceIfMatch = { parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], mapper: { serializedName: "x-ms-source-if-match", @@ -24528,7 +25588,7 @@ const sourceIfMatch = { } } }; -const sourceIfNoneMatch = { +var sourceIfNoneMatch = { parameterPath: [ "options", "sourceModifiedAccessConditions", @@ -24542,7 +25602,7 @@ const sourceIfNoneMatch = { } } }; -const action5 = { +var action5 = { parameterPath: "action", mapper: { defaultValue: "setAccessControl", @@ -24553,7 +25613,7 @@ const action5 = { } } }; -const owner = { +var owner = { parameterPath: ["options", "owner"], mapper: { serializedName: "x-ms-owner", @@ -24563,7 +25623,7 @@ const owner = { } } }; -const group = { +var group = { parameterPath: ["options", "group"], mapper: { serializedName: "x-ms-group", @@ -24573,7 +25633,7 @@ const group = { } } }; -const posixAcl = { +var posixAcl = { parameterPath: ["options", "posixAcl"], mapper: { serializedName: "x-ms-acl", @@ -24583,7 +25643,7 @@ const posixAcl = { } } }; -const action6 = { +var action6 = { parameterPath: "action", mapper: { defaultValue: "getAccessControl", @@ -24594,7 +25654,7 @@ const action6 = { } } }; -const upn = { +var upn = { parameterPath: ["options", "upn"], mapper: { serializedName: "upn", @@ -24604,7 +25664,7 @@ const upn = { } } }; -const snapshot = { +var snapshot = { parameterPath: ["options", "snapshot"], mapper: { serializedName: "snapshot", @@ -24614,7 +25674,7 @@ const snapshot = { } } }; -const versionId = { +var versionId = { parameterPath: ["options", "versionId"], mapper: { serializedName: "versionid", @@ -24624,7 +25684,7 @@ const versionId = { } } }; -const range = { +var range = { parameterPath: ["options", "range"], mapper: { serializedName: "x-ms-range", @@ -24634,7 +25694,7 @@ const range = { } } }; -const rangeGetContentMD5 = { +var rangeGetContentMD5 = { parameterPath: ["options", "rangeGetContentMD5"], mapper: { serializedName: "x-ms-range-get-content-md5", @@ -24644,7 +25704,7 @@ const rangeGetContentMD5 = { } } }; -const 
rangeGetContentCRC64 = { +var rangeGetContentCRC64 = { parameterPath: ["options", "rangeGetContentCRC64"], mapper: { serializedName: "x-ms-range-get-content-crc64", @@ -24654,7 +25714,7 @@ const rangeGetContentCRC64 = { } } }; -const encryptionKey = { +var encryptionKey = { parameterPath: ["options", "cpkInfo", "encryptionKey"], mapper: { serializedName: "x-ms-encryption-key", @@ -24664,7 +25724,7 @@ const encryptionKey = { } } }; -const encryptionKeySha256 = { +var encryptionKeySha256 = { parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], mapper: { serializedName: "x-ms-encryption-key-sha256", @@ -24674,7 +25734,7 @@ const encryptionKeySha256 = { } } }; -const encryptionAlgorithm = { +var encryptionAlgorithm = { parameterPath: ["options", "encryptionAlgorithm"], mapper: { defaultValue: "AES256", @@ -24685,7 +25745,7 @@ const encryptionAlgorithm = { } } }; -const ifTags = { +var ifTags = { parameterPath: ["options", "modifiedAccessConditions", "ifTags"], mapper: { serializedName: "x-ms-if-tags", @@ -24695,7 +25755,7 @@ const ifTags = { } } }; -const deleteSnapshots = { +var deleteSnapshots = { parameterPath: ["options", "deleteSnapshots"], mapper: { serializedName: "x-ms-delete-snapshots", @@ -24706,7 +25766,7 @@ const deleteSnapshots = { } } }; -const blobDeleteType = { +var blobDeleteType = { parameterPath: ["options", "blobDeleteType"], mapper: { serializedName: "deletetype", @@ -24716,7 +25776,7 @@ const blobDeleteType = { } } }; -const comp11 = { +var comp11 = { parameterPath: "comp", mapper: { defaultValue: "expiry", @@ -24727,7 +25787,7 @@ const comp11 = { } } }; -const expiryOptions = { +var expiryOptions = { parameterPath: "expiryOptions", mapper: { serializedName: "x-ms-expiry-option", @@ -24738,7 +25798,7 @@ const expiryOptions = { } } }; -const expiresOn = { +var expiresOn = { parameterPath: ["options", "expiresOn"], mapper: { serializedName: "x-ms-expiry-time", @@ -24748,7 +25808,7 @@ const expiresOn = { } } }; -const blobCacheControl = { +var blobCacheControl = { parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], mapper: { serializedName: "x-ms-blob-cache-control", @@ -24758,7 +25818,7 @@ const blobCacheControl = { } } }; -const blobContentType = { +var blobContentType = { parameterPath: ["options", "blobHttpHeaders", "blobContentType"], mapper: { serializedName: "x-ms-blob-content-type", @@ -24768,7 +25828,7 @@ const blobContentType = { } } }; -const blobContentMD5 = { +var blobContentMD5 = { parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], mapper: { serializedName: "x-ms-blob-content-md5", @@ -24778,7 +25838,7 @@ const blobContentMD5 = { } } }; -const blobContentEncoding = { +var blobContentEncoding = { parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], mapper: { serializedName: "x-ms-blob-content-encoding", @@ -24788,7 +25848,7 @@ const blobContentEncoding = { } } }; -const blobContentLanguage = { +var blobContentLanguage = { parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], mapper: { serializedName: "x-ms-blob-content-language", @@ -24798,7 +25858,7 @@ const blobContentLanguage = { } } }; -const blobContentDisposition = { +var blobContentDisposition = { parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], mapper: { serializedName: "x-ms-blob-content-disposition", @@ -24808,7 +25868,7 @@ const blobContentDisposition = { } } }; -const encryptionScope = { +var encryptionScope = { parameterPath: ["options", "encryptionScope"], mapper: { serializedName: 
"x-ms-encryption-scope", @@ -24818,7 +25878,7 @@ const encryptionScope = { } } }; -const comp12 = { +var comp12 = { parameterPath: "comp", mapper: { defaultValue: "snapshot", @@ -24829,7 +25889,7 @@ const comp12 = { } } }; -const tier = { +var tier = { parameterPath: ["options", "tier"], mapper: { serializedName: "x-ms-access-tier", @@ -24855,7 +25915,7 @@ const tier = { } } }; -const rehydratePriority = { +var rehydratePriority = { parameterPath: ["options", "rehydratePriority"], mapper: { serializedName: "x-ms-rehydrate-priority", @@ -24866,7 +25926,7 @@ const rehydratePriority = { } } }; -const sourceIfTags = { +var sourceIfTags = { parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], mapper: { serializedName: "x-ms-source-if-tags", @@ -24876,7 +25936,7 @@ const sourceIfTags = { } } }; -const copySource = { +var copySource = { parameterPath: "copySource", mapper: { serializedName: "x-ms-copy-source", @@ -24887,7 +25947,7 @@ const copySource = { } } }; -const blobTagsString = { +var blobTagsString = { parameterPath: ["options", "blobTagsString"], mapper: { serializedName: "x-ms-tags", @@ -24897,7 +25957,7 @@ const blobTagsString = { } } }; -const sealBlob = { +var sealBlob = { parameterPath: ["options", "sealBlob"], mapper: { serializedName: "x-ms-seal-blob", @@ -24907,7 +25967,7 @@ const sealBlob = { } } }; -const xMsRequiresSync = { +var xMsRequiresSync = { parameterPath: "xMsRequiresSync", mapper: { defaultValue: "true", @@ -24918,7 +25978,7 @@ const xMsRequiresSync = { } } }; -const sourceContentMD5 = { +var sourceContentMD5 = { parameterPath: ["options", "sourceContentMD5"], mapper: { serializedName: "x-ms-source-content-md5", @@ -24928,7 +25988,7 @@ const sourceContentMD5 = { } } }; -const comp13 = { +var comp13 = { parameterPath: "comp", mapper: { defaultValue: "copy", @@ -24939,7 +25999,7 @@ const comp13 = { } } }; -const copyActionAbortConstant = { +var copyActionAbortConstant = { parameterPath: "copyActionAbortConstant", mapper: { defaultValue: "abort", @@ -24950,7 +26010,7 @@ const copyActionAbortConstant = { } } }; -const copyId = { +var copyId = { parameterPath: "copyId", mapper: { serializedName: "copyid", @@ -24961,7 +26021,7 @@ const copyId = { } } }; -const comp14 = { +var comp14 = { parameterPath: "comp", mapper: { defaultValue: "tier", @@ -24972,7 +26032,7 @@ const comp14 = { } } }; -const tier1 = { +var tier1 = { parameterPath: "tier", mapper: { serializedName: "x-ms-access-tier", @@ -24999,11 +26059,11 @@ const tier1 = { } } }; -const queryRequest = { +var queryRequest = { parameterPath: ["options", "queryRequest"], mapper: QueryRequest }; -const comp15 = { +var comp15 = { parameterPath: "comp", mapper: { defaultValue: "query", @@ -25014,7 +26074,7 @@ const comp15 = { } } }; -const comp16 = { +var comp16 = { parameterPath: "comp", mapper: { defaultValue: "tags", @@ -25025,11 +26085,11 @@ const comp16 = { } } }; -const tags = { +var tags = { parameterPath: ["options", "tags"], mapper: BlobTags }; -const transactionalContentMD5 = { +var transactionalContentMD5 = { parameterPath: ["options", "transactionalContentMD5"], mapper: { serializedName: "Content-MD5", @@ -25039,7 +26099,7 @@ const transactionalContentMD5 = { } } }; -const transactionalContentCrc64 = { +var transactionalContentCrc64 = { parameterPath: ["options", "transactionalContentCrc64"], mapper: { serializedName: "x-ms-content-crc64", @@ -25049,7 +26109,7 @@ const transactionalContentCrc64 = { } } }; -const blobType = { +var blobType = { parameterPath: "blobType", mapper: { 
defaultValue: "PageBlob", @@ -25060,7 +26120,7 @@ const blobType = { } } }; -const blobContentLength = { +var blobContentLength = { parameterPath: "blobContentLength", mapper: { serializedName: "x-ms-blob-content-length", @@ -25071,7 +26131,7 @@ const blobContentLength = { } } }; -const blobSequenceNumber = { +var blobSequenceNumber = { parameterPath: ["options", "blobSequenceNumber"], mapper: { serializedName: "x-ms-blob-sequence-number", @@ -25081,7 +26141,7 @@ const blobSequenceNumber = { } } }; -const contentType2 = { +var contentType2 = { parameterPath: ["options", "contentType"], mapper: { defaultValue: "application/octet-stream", @@ -25092,7 +26152,7 @@ const contentType2 = { } } }; -const body1 = { +var body1 = { parameterPath: "body", mapper: { serializedName: "body", @@ -25103,7 +26163,7 @@ const body1 = { } } }; -const accept2 = { +var accept2 = { parameterPath: "accept", mapper: { defaultValue: "application/xml", @@ -25114,7 +26174,7 @@ const accept2 = { } } }; -const comp17 = { +var comp17 = { parameterPath: "comp", mapper: { defaultValue: "page", @@ -25125,7 +26185,7 @@ const comp17 = { } } }; -const pageWrite = { +var pageWrite = { parameterPath: "pageWrite", mapper: { defaultValue: "update", @@ -25136,7 +26196,7 @@ const pageWrite = { } } }; -const ifSequenceNumberLessThanOrEqualTo = { +var ifSequenceNumberLessThanOrEqualTo = { parameterPath: [ "options", "sequenceNumberAccessConditions", @@ -25150,7 +26210,7 @@ const ifSequenceNumberLessThanOrEqualTo = { } } }; -const ifSequenceNumberLessThan = { +var ifSequenceNumberLessThan = { parameterPath: [ "options", "sequenceNumberAccessConditions", @@ -25164,7 +26224,7 @@ const ifSequenceNumberLessThan = { } } }; -const ifSequenceNumberEqualTo = { +var ifSequenceNumberEqualTo = { parameterPath: [ "options", "sequenceNumberAccessConditions", @@ -25178,7 +26238,7 @@ const ifSequenceNumberEqualTo = { } } }; -const pageWrite1 = { +var pageWrite1 = { parameterPath: "pageWrite", mapper: { defaultValue: "clear", @@ -25189,7 +26249,7 @@ const pageWrite1 = { } } }; -const sourceUrl = { +var sourceUrl = { parameterPath: "sourceUrl", mapper: { serializedName: "x-ms-copy-source", @@ -25200,7 +26260,7 @@ const sourceUrl = { } } }; -const sourceRange = { +var sourceRange = { parameterPath: "sourceRange", mapper: { serializedName: "x-ms-source-range", @@ -25211,7 +26271,7 @@ const sourceRange = { } } }; -const sourceContentCrc64 = { +var sourceContentCrc64 = { parameterPath: ["options", "sourceContentCrc64"], mapper: { serializedName: "x-ms-source-content-crc64", @@ -25221,7 +26281,7 @@ const sourceContentCrc64 = { } } }; -const range1 = { +var range1 = { parameterPath: "range", mapper: { serializedName: "x-ms-range", @@ -25232,7 +26292,7 @@ const range1 = { } } }; -const comp18 = { +var comp18 = { parameterPath: "comp", mapper: { defaultValue: "pagelist", @@ -25243,7 +26303,7 @@ const comp18 = { } } }; -const prevsnapshot = { +var prevsnapshot = { parameterPath: ["options", "prevsnapshot"], mapper: { serializedName: "prevsnapshot", @@ -25253,7 +26313,7 @@ const prevsnapshot = { } } }; -const prevSnapshotUrl = { +var prevSnapshotUrl = { parameterPath: ["options", "prevSnapshotUrl"], mapper: { serializedName: "x-ms-previous-snapshot-url", @@ -25263,7 +26323,7 @@ const prevSnapshotUrl = { } } }; -const sequenceNumberAction = { +var sequenceNumberAction = { parameterPath: "sequenceNumberAction", mapper: { serializedName: "x-ms-sequence-number-action", @@ -25275,7 +26335,7 @@ const sequenceNumberAction = { } } }; -const comp19 = { +var comp19 = { 
parameterPath: "comp", mapper: { defaultValue: "incrementalcopy", @@ -25286,7 +26346,7 @@ const comp19 = { } } }; -const blobType1 = { +var blobType1 = { parameterPath: "blobType", mapper: { defaultValue: "AppendBlob", @@ -25297,7 +26357,7 @@ const blobType1 = { } } }; -const comp20 = { +var comp20 = { parameterPath: "comp", mapper: { defaultValue: "appendblock", @@ -25308,7 +26368,7 @@ const comp20 = { } } }; -const maxSize = { +var maxSize = { parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], mapper: { serializedName: "x-ms-blob-condition-maxsize", @@ -25318,7 +26378,7 @@ const maxSize = { } } }; -const appendPosition = { +var appendPosition = { parameterPath: [ "options", "appendPositionAccessConditions", @@ -25332,7 +26392,7 @@ const appendPosition = { } } }; -const sourceRange1 = { +var sourceRange1 = { parameterPath: ["options", "sourceRange"], mapper: { serializedName: "x-ms-source-range", @@ -25342,7 +26402,7 @@ const sourceRange1 = { } } }; -const comp21 = { +var comp21 = { parameterPath: "comp", mapper: { defaultValue: "seal", @@ -25353,7 +26413,7 @@ const comp21 = { } } }; -const blobType2 = { +var blobType2 = { parameterPath: "blobType", mapper: { defaultValue: "BlockBlob", @@ -25364,7 +26424,7 @@ const blobType2 = { } } }; -const copySourceBlobProperties = { +var copySourceBlobProperties = { parameterPath: ["options", "copySourceBlobProperties"], mapper: { serializedName: "x-ms-copy-source-blob-properties", @@ -25374,7 +26434,7 @@ const copySourceBlobProperties = { } } }; -const comp22 = { +var comp22 = { parameterPath: "comp", mapper: { defaultValue: "block", @@ -25385,7 +26445,7 @@ const comp22 = { } } }; -const blockId = { +var blockId = { parameterPath: "blockId", mapper: { serializedName: "blockid", @@ -25396,11 +26456,11 @@ const blockId = { } } }; -const blocks = { +var blocks = { parameterPath: "blocks", mapper: BlockLookupList }; -const comp23 = { +var comp23 = { parameterPath: "comp", mapper: { defaultValue: "blocklist", @@ -25411,7 +26471,7 @@ const comp23 = { } } }; -const listType = { +var listType = { parameterPath: "listType", mapper: { defaultValue: "committed", @@ -25433,12 +26493,12 @@ const listType = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class representing a Service. */ -class Service { +var Service = /** @class */ (function () { /** * Initialize a new instance of the class Service class. * @param client Reference to the service client */ - constructor(client) { + function Service(client) { this.client = client; } /** @@ -25447,69 +26507,69 @@ class Service { * @param blobServiceProperties The StorageService properties. * @param options The options parameters. */ - setProperties(blobServiceProperties, options) { - const operationArguments = { - blobServiceProperties, + Service.prototype.setProperties = function (blobServiceProperties, options) { + var operationArguments = { + blobServiceProperties: blobServiceProperties, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); - } + }; /** * gets the properties of a storage account's Blob service, including properties for Storage Analytics * and CORS (Cross-Origin Resource Sharing) rules. * @param options The options parameters. 
*/ - getProperties(options) { - const operationArguments = { + Service.prototype.getProperties = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); - } + }; /** * Retrieves statistics related to replication for the Blob service. It is only available on the * secondary location endpoint when read-access geo-redundant replication is enabled for the storage * account. * @param options The options parameters. */ - getStatistics(options) { - const operationArguments = { + Service.prototype.getStatistics = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); - } + }; /** * The List Containers Segment operation returns a list of the containers under the specified account * @param options The options parameters. */ - listContainersSegment(options) { - const operationArguments = { + Service.prototype.listContainersSegment = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); - } + }; /** * Retrieves a user delegation key for the Blob service. This is only a valid operation when using * bearer token authentication. * @param keyInfo Key information * @param options The options parameters. */ - getUserDelegationKey(keyInfo, options) { - const operationArguments = { - keyInfo, + Service.prototype.getUserDelegationKey = function (keyInfo, options) { + var operationArguments = { + keyInfo: keyInfo, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); - } + }; /** * Returns the sku name and account kind * @param options The options parameters. */ - getAccountInfo(options) { - const operationArguments = { + Service.prototype.getAccountInfo = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); - } + }; /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. * @param contentLength The length of the request. @@ -25518,31 +26578,32 @@ class Service { * @param body Initial data * @param options The options parameters. */ - submitBatch(contentLength, multipartContentType, body, options) { - const operationArguments = { - contentLength, - multipartContentType, - body, + Service.prototype.submitBatch = function (contentLength, multipartContentType, body, options) { + var operationArguments = { + contentLength: contentLength, + multipartContentType: multipartContentType, + body: body, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); - } + }; /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a * given search expression. Filter blobs searches across all containers within a storage account but * can be scoped within the expression to a single container. * @param options The options parameters. 
*/ - filterBlobs(options) { - const operationArguments = { + Service.prototype.filterBlobs = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); - } -} + }; + return Service; +}()); // Operation Specifications -const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); -const setPropertiesOperationSpec = { +var xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +var setPropertiesOperationSpec = { path: "/", httpMethod: "PUT", responses: { @@ -25572,7 +26633,7 @@ const setPropertiesOperationSpec = { mediaType: "xml", serializer: xmlSerializer }; -const getPropertiesOperationSpec = { +var getPropertiesOperationSpec = { path: "/", httpMethod: "GET", responses: { @@ -25599,7 +26660,7 @@ const getPropertiesOperationSpec = { isXML: true, serializer: xmlSerializer }; -const getStatisticsOperationSpec = { +var getStatisticsOperationSpec = { path: "/", httpMethod: "GET", responses: { @@ -25626,7 +26687,7 @@ const getStatisticsOperationSpec = { isXML: true, serializer: xmlSerializer }; -const listContainersSegmentOperationSpec = { +var listContainersSegmentOperationSpec = { path: "/", httpMethod: "GET", responses: { @@ -25656,7 +26717,7 @@ const listContainersSegmentOperationSpec = { isXML: true, serializer: xmlSerializer }; -const getUserDelegationKeyOperationSpec = { +var getUserDelegationKeyOperationSpec = { path: "/", httpMethod: "POST", responses: { @@ -25687,7 +26748,7 @@ const getUserDelegationKeyOperationSpec = { mediaType: "xml", serializer: xmlSerializer }; -const getAccountInfoOperationSpec = { +var getAccountInfoOperationSpec = { path: "/", httpMethod: "GET", responses: { @@ -25705,7 +26766,7 @@ const getAccountInfoOperationSpec = { isXML: true, serializer: xmlSerializer }; -const submitBatchOperationSpec = { +var submitBatchOperationSpec = { path: "/", httpMethod: "POST", responses: { @@ -25737,7 +26798,7 @@ const submitBatchOperationSpec = { mediaType: "xml", serializer: xmlSerializer }; -const filterBlobsOperationSpec = { +var filterBlobsOperationSpec = { path: "/", httpMethod: "GET", responses: { @@ -25775,12 +26836,12 @@ const filterBlobsOperationSpec = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class representing a Container. */ -class Container { +var Container = /** @class */ (function () { /** * Initialize a new instance of the class Container class. * @param client Reference to the service client */ - constructor(client) { + function Container(client) { this.client = client; } /** @@ -25788,88 +26849,88 @@ class Container { * exists, the operation fails * @param options The options parameters. */ - create(options) { - const operationArguments = { + Container.prototype.create = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createOperationSpec); - } + }; /** * returns all user-defined metadata and system properties for the specified container. The data * returned does not include the container's list of blobs * @param options The options parameters. 
*/ - getProperties(options) { - const operationArguments = { + Container.prototype.getProperties = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); - } + }; /** * operation marks the specified container for deletion. The container and any blobs contained within * it are later deleted during garbage collection * @param options The options parameters. */ - delete(options) { - const operationArguments = { + Container.prototype.delete = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); - } + }; /** * operation sets one or more user-defined name-value pairs for the specified container. * @param options The options parameters. */ - setMetadata(options) { - const operationArguments = { + Container.prototype.setMetadata = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); - } + }; /** * gets the permissions for the specified container. The permissions indicate whether container data * may be accessed publicly. * @param options The options parameters. */ - getAccessPolicy(options) { - const operationArguments = { + Container.prototype.getAccessPolicy = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); - } + }; /** * sets the permissions for the specified container. The permissions indicate whether blobs in a * container may be accessed publicly. * @param options The options parameters. */ - setAccessPolicy(options) { - const operationArguments = { + Container.prototype.setAccessPolicy = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); - } + }; /** * Restores a previously-deleted container. * @param options The options parameters. */ - restore(options) { - const operationArguments = { + Container.prototype.restore = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); - } + }; /** * Renames an existing container. * @param sourceContainerName Required. Specifies the name of the container to rename. * @param options The options parameters. */ - rename(sourceContainerName, options) { - const operationArguments = { - sourceContainerName, + Container.prototype.rename = function (sourceContainerName, options) { + var operationArguments = { + sourceContainerName: sourceContainerName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, renameOperationSpec); - } + }; /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. * @param contentLength The length of the request. @@ -25878,63 +26939,63 @@ class Container { * @param body Initial data * @param options The options parameters. 
*/ - submitBatch(contentLength, multipartContentType, body, options) { - const operationArguments = { - contentLength, - multipartContentType, - body, + Container.prototype.submitBatch = function (contentLength, multipartContentType, body, options) { + var operationArguments = { + contentLength: contentLength, + multipartContentType: multipartContentType, + body: body, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); - } + }; /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param options The options parameters. */ - acquireLease(options) { - const operationArguments = { + Container.prototype.acquireLease = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); - } + }; /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ - releaseLease(leaseId, options) { - const operationArguments = { - leaseId, + Container.prototype.releaseLease = function (leaseId, options) { + var operationArguments = { + leaseId: leaseId, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); - } + }; /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ - renewLease(leaseId, options) { - const operationArguments = { - leaseId, + Container.prototype.renewLease = function (leaseId, options) { + var operationArguments = { + leaseId: leaseId, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); - } + }; /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param options The options parameters. */ - breakLease(options) { - const operationArguments = { + Container.prototype.breakLease = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); - } + }; /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite @@ -25944,24 +27005,24 @@ class Container { * (String) for a list of valid GUID string formats. * @param options The options parameters. 
*/ - changeLease(leaseId, proposedLeaseId, options) { - const operationArguments = { - leaseId, - proposedLeaseId, + Container.prototype.changeLease = function (leaseId, proposedLeaseId, options) { + var operationArguments = { + leaseId: leaseId, + proposedLeaseId: proposedLeaseId, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); - } + }; /** * [Update] The List Blobs operation returns a list of the blobs under the specified container * @param options The options parameters. */ - listBlobFlatSegment(options) { - const operationArguments = { + Container.prototype.listBlobFlatSegment = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); - } + }; /** * [Update] The List Blobs operation returns a list of the blobs under the specified container * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix @@ -25970,27 +27031,28 @@ class Container { * character or a string. * @param options The options parameters. */ - listBlobHierarchySegment(delimiter, options) { - const operationArguments = { - delimiter, + Container.prototype.listBlobHierarchySegment = function (delimiter, options) { + var operationArguments = { + delimiter: delimiter, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); - } + }; /** * Returns the sku name and account kind * @param options The options parameters. */ - getAccountInfo(options) { - const operationArguments = { + Container.prototype.getAccountInfo = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); - } -} + }; + return Container; +}()); // Operation Specifications -const xmlSerializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const createOperationSpec = { +var xmlSerializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ true); +var createOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -26016,7 +27078,7 @@ const createOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const getPropertiesOperationSpec$1 = { +var getPropertiesOperationSpec$1 = { path: "/{containerName}", httpMethod: "GET", responses: { @@ -26039,7 +27101,7 @@ const getPropertiesOperationSpec$1 = { isXML: true, serializer: xmlSerializer$1 }; -const deleteOperationSpec = { +var deleteOperationSpec = { path: "/{containerName}", httpMethod: "DELETE", responses: { @@ -26064,7 +27126,7 @@ const deleteOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const setMetadataOperationSpec = { +var setMetadataOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -26093,7 +27155,7 @@ const setMetadataOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const getAccessPolicyOperationSpec = { +var getAccessPolicyOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { @@ -26132,7 +27194,7 @@ const getAccessPolicyOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const setAccessPolicyOperationSpec = { +var setAccessPolicyOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 
@@ -26166,7 +27228,7 @@ const setAccessPolicyOperationSpec = { mediaType: "xml", serializer: xmlSerializer$1 }; -const restoreOperationSpec = { +var restoreOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -26194,7 +27256,7 @@ const restoreOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const renameOperationSpec = { +var renameOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -26222,7 +27284,7 @@ const renameOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const submitBatchOperationSpec$1 = { +var submitBatchOperationSpec$1 = { path: "/{containerName}", httpMethod: "POST", responses: { @@ -26258,7 +27320,7 @@ const submitBatchOperationSpec$1 = { mediaType: "xml", serializer: xmlSerializer$1 }; -const acquireLeaseOperationSpec = { +var acquireLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -26289,7 +27351,7 @@ const acquireLeaseOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const releaseLeaseOperationSpec = { +var releaseLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -26319,7 +27381,7 @@ const releaseLeaseOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const renewLeaseOperationSpec = { +var renewLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -26349,7 +27411,7 @@ const renewLeaseOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const breakLeaseOperationSpec = { +var breakLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -26379,7 +27441,7 @@ const breakLeaseOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const changeLeaseOperationSpec = { +var changeLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -26410,7 +27472,7 @@ const changeLeaseOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const listBlobFlatSegmentOperationSpec = { +var listBlobFlatSegmentOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { @@ -26441,7 +27503,7 @@ const listBlobFlatSegmentOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const listBlobHierarchySegmentOperationSpec = { +var listBlobHierarchySegmentOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { @@ -26473,7 +27535,7 @@ const listBlobHierarchySegmentOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const getAccountInfoOperationSpec$1 = { +var getAccountInfoOperationSpec$1 = { path: "/{containerName}", httpMethod: "GET", responses: { @@ -26500,12 +27562,12 @@ const getAccountInfoOperationSpec$1 = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class representing a Blob. */ -class Blob$1 { +var Blob$1 = /** @class */ (function () { /** * Initialize a new instance of the class Blob class. * @param client Reference to the service client */ - constructor(client) { + function Blob(client) { this.client = client; } /** @@ -26513,23 +27575,23 @@ class Blob$1 { * properties. You can also call Download to read a snapshot. * @param options The options parameters. 
*/ - download(options) { - const operationArguments = { + Blob.prototype.download = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); - } + }; /** * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system * properties for the blob. It does not return the content of the blob. * @param options The options parameters. */ - getProperties(options) { - const operationArguments = { + Blob.prototype.getProperties = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); - } + }; /** * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is * permanently removed from the storage account. If the storage account's soft delete feature is @@ -26545,32 +27607,32 @@ class Blob$1 { * (ResourceNotFound). * @param options The options parameters. */ - delete(options) { - const operationArguments = { + Blob.prototype.delete = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); - } + }; /** * Set the owner, group, permissions, or access control list for a blob. * @param options The options parameters. */ - setAccessControl(options) { - const operationArguments = { + Blob.prototype.setAccessControl = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setAccessControlOperationSpec); - } + }; /** * Get the owner, group, permissions, or access control list for a blob. * @param options The options parameters. */ - getAccessControl(options) { - const operationArguments = { + Blob.prototype.getAccessControl = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccessControlOperationSpec); - } + }; /** * Rename a blob/file. By default, the destination is overwritten and if the destination already * exists and has a lease the lease is broken. This operation supports conditional HTTP requests. For @@ -26582,93 +27644,93 @@ class Blob$1 { * existing properties; otherwise, the existing properties will be preserved. * @param options The options parameters. */ - rename(renameSource, options) { - const operationArguments = { - renameSource, + Blob.prototype.rename = function (renameSource, options) { + var operationArguments = { + renameSource: renameSource, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, renameOperationSpec$1); - } + }; /** * Undelete a blob that was previously soft deleted * @param options The options parameters. */ - undelete(options) { - const operationArguments = { + Blob.prototype.undelete = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); - } + }; /** * Sets the time a blob will expire and be deleted. * @param expiryOptions Required. 
Indicates mode of the expiry time * @param options The options parameters. */ - setExpiry(expiryOptions, options) { - const operationArguments = { - expiryOptions, + Blob.prototype.setExpiry = function (expiryOptions, options) { + var operationArguments = { + expiryOptions: expiryOptions, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); - } + }; /** * The Set HTTP Headers operation sets system properties on the blob * @param options The options parameters. */ - setHttpHeaders(options) { - const operationArguments = { + Blob.prototype.setHttpHeaders = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); - } + }; /** * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more * name-value pairs * @param options The options parameters. */ - setMetadata(options) { - const operationArguments = { + Blob.prototype.setMetadata = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); - } + }; /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param options The options parameters. */ - acquireLease(options) { - const operationArguments = { + Blob.prototype.acquireLease = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); - } + }; /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ - releaseLease(leaseId, options) { - const operationArguments = { - leaseId, + Blob.prototype.releaseLease = function (leaseId, options) { + var operationArguments = { + leaseId: leaseId, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); - } + }; /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ - renewLease(leaseId, options) { - const operationArguments = { - leaseId, + Blob.prototype.renewLease = function (leaseId, options) { + var operationArguments = { + leaseId: leaseId, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); - } + }; /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations @@ -26678,35 +27740,35 @@ class Blob$1 { * (String) for a list of valid GUID string formats. * @param options The options parameters. 
*/ - changeLease(leaseId, proposedLeaseId, options) { - const operationArguments = { - leaseId, - proposedLeaseId, + Blob.prototype.changeLease = function (leaseId, proposedLeaseId, options) { + var operationArguments = { + leaseId: leaseId, + proposedLeaseId: proposedLeaseId, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); - } + }; /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param options The options parameters. */ - breakLease(options) { - const operationArguments = { + Blob.prototype.breakLease = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); - } + }; /** * The Create Snapshot operation creates a read-only snapshot of a blob * @param options The options parameters. */ - createSnapshot(options) { - const operationArguments = { + Blob.prototype.createSnapshot = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); - } + }; /** * The Start Copy From URL operation copies a blob or an internet resource to a new blob. * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to @@ -26715,13 +27777,13 @@ class Blob$1 { * access signature. * @param options The options parameters. */ - startCopyFromURL(copySource, options) { - const operationArguments = { - copySource, + Blob.prototype.startCopyFromURL = function (copySource, options) { + var operationArguments = { + copySource: copySource, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); - } + }; /** * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return * a response until the copy is complete. @@ -26731,13 +27793,13 @@ class Blob$1 { * access signature. * @param options The options parameters. */ - copyFromURL(copySource, options) { - const operationArguments = { - copySource, + Blob.prototype.copyFromURL = function (copySource, options) { + var operationArguments = { + copySource: copySource, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); - } + }; /** * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination * blob with zero length and full metadata. @@ -26745,13 +27807,13 @@ class Blob$1 { * operation. * @param options The options parameters. */ - abortCopyFromURL(copyId, options) { - const operationArguments = { - copyId, + Blob.prototype.abortCopyFromURL = function (copyId, options) { + var operationArguments = { + copyId: copyId, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); - } + }; /** * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium * storage account and on a block blob in a blob storage account (locally redundant storage only). 
A @@ -26761,58 +27823,59 @@ class Blob$1 { * @param tier Indicates the tier to be set on the blob. * @param options The options parameters. */ - setTier(tier, options) { - const operationArguments = { - tier, + Blob.prototype.setTier = function (tier, options) { + var operationArguments = { + tier: tier, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); - } + }; /** * Returns the sku name and account kind * @param options The options parameters. */ - getAccountInfo(options) { - const operationArguments = { + Blob.prototype.getAccountInfo = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); - } + }; /** * The Query operation enables users to select/project on blob data by providing simple query * expressions. * @param options The options parameters. */ - query(options) { - const operationArguments = { + Blob.prototype.query = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, queryOperationSpec); - } + }; /** * The Get Tags operation enables users to get the tags associated with a blob. * @param options The options parameters. */ - getTags(options) { - const operationArguments = { + Blob.prototype.getTags = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); - } + }; /** * The Set Tags operation enables users to set tags on a blob. * @param options The options parameters. 
*/ - setTags(options) { - const operationArguments = { + Blob.prototype.setTags = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); - } -} + }; + return Blob; +}()); // Operation Specifications -const xmlSerializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const downloadOperationSpec = { +var xmlSerializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ true); +var downloadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -26861,7 +27924,7 @@ const downloadOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const getPropertiesOperationSpec$2 = { +var getPropertiesOperationSpec$2 = { path: "/{containerName}/{blob}", httpMethod: "HEAD", responses: { @@ -26896,7 +27959,7 @@ const getPropertiesOperationSpec$2 = { isXML: true, serializer: xmlSerializer$2 }; -const deleteOperationSpec$1 = { +var deleteOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { @@ -26930,7 +27993,7 @@ const deleteOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const setAccessControlOperationSpec = { +var setAccessControlOperationSpec = { path: "/{filesystem}/{path}", httpMethod: "PATCH", responses: { @@ -26961,7 +28024,7 @@ const setAccessControlOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const getAccessControlOperationSpec = { +var getAccessControlOperationSpec = { path: "/{filesystem}/{path}", httpMethod: "HEAD", responses: { @@ -26992,7 +28055,7 @@ const getAccessControlOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const renameOperationSpec$1 = { +var renameOperationSpec$1 = { path: "/{filesystem}/{path}", httpMethod: "PUT", responses: { @@ -27033,7 +28096,7 @@ const renameOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const undeleteOperationSpec = { +var undeleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27055,7 +28118,7 @@ const undeleteOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const setExpiryOperationSpec = { +var setExpiryOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27079,7 +28142,7 @@ const setExpiryOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const setHttpHeadersOperationSpec = { +var setHttpHeadersOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27113,7 +28176,7 @@ const setHttpHeadersOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const setMetadataOperationSpec$1 = { +var setMetadataOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27146,7 +28209,7 @@ const setMetadataOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const acquireLeaseOperationSpec$1 = { +var acquireLeaseOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27176,7 +28239,7 @@ const acquireLeaseOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const releaseLeaseOperationSpec$1 = { +var releaseLeaseOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27205,7 +28268,7 @@ const releaseLeaseOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const renewLeaseOperationSpec$1 = { +var renewLeaseOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27234,7 +28297,7 @@ 
const renewLeaseOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const changeLeaseOperationSpec$1 = { +var changeLeaseOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27264,7 +28327,7 @@ const changeLeaseOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const breakLeaseOperationSpec$1 = { +var breakLeaseOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27293,7 +28356,7 @@ const breakLeaseOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const createSnapshotOperationSpec = { +var createSnapshotOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27326,7 +28389,7 @@ const createSnapshotOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const startCopyFromURLOperationSpec = { +var startCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27365,7 +28428,7 @@ const startCopyFromURLOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const copyFromURLOperationSpec = { +var copyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27403,7 +28466,7 @@ const copyFromURLOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const abortCopyFromURLOperationSpec = { +var abortCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27431,7 +28494,7 @@ const abortCopyFromURLOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const setTierOperationSpec = { +var setTierOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27465,7 +28528,7 @@ const setTierOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const getAccountInfoOperationSpec$2 = { +var getAccountInfoOperationSpec$2 = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -27483,7 +28546,7 @@ const getAccountInfoOperationSpec$2 = { isXML: true, serializer: xmlSerializer$2 }; -const queryOperationSpec = { +var queryOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "POST", responses: { @@ -27533,7 +28596,7 @@ const queryOperationSpec = { mediaType: "xml", serializer: xmlSerializer$2 }; -const getTagsOperationSpec = { +var getTagsOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -27563,7 +28626,7 @@ const getTagsOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const setTagsOperationSpec = { +var setTagsOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27606,12 +28669,12 @@ const setTagsOperationSpec = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class representing a PageBlob. */ -class PageBlob { +var PageBlob = /** @class */ (function () { /** * Initialize a new instance of the class PageBlob class. * @param client Reference to the service client */ - constructor(client) { + function PageBlob(client) { this.client = client; } /** @@ -27621,40 +28684,40 @@ class PageBlob { * page blob size must be aligned to a 512-byte boundary. * @param options The options parameters. 
*/ - create(contentLength, blobContentLength, options) { - const operationArguments = { - contentLength, - blobContentLength, + PageBlob.prototype.create = function (contentLength, blobContentLength, options) { + var operationArguments = { + contentLength: contentLength, + blobContentLength: blobContentLength, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); - } + }; /** * The Upload Pages operation writes a range of pages to a page blob * @param contentLength The length of the request. * @param body Initial data * @param options The options parameters. */ - uploadPages(contentLength, body, options) { - const operationArguments = { - contentLength, - body, + PageBlob.prototype.uploadPages = function (contentLength, body, options) { + var operationArguments = { + contentLength: contentLength, + body: body, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); - } + }; /** * The Clear Pages operation clears a set of pages from a page blob * @param contentLength The length of the request. * @param options The options parameters. */ - clearPages(contentLength, options) { - const operationArguments = { - contentLength, + PageBlob.prototype.clearPages = function (contentLength, options) { + var operationArguments = { + contentLength: contentLength, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); - } + }; /** * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a * URL @@ -27666,51 +28729,51 @@ class PageBlob { * aligned and range-end is required. * @param options The options parameters. */ - uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { - const operationArguments = { - sourceUrl, - sourceRange, - contentLength, - range, + PageBlob.prototype.uploadPagesFromURL = function (sourceUrl, sourceRange, contentLength, range, options) { + var operationArguments = { + sourceUrl: sourceUrl, + sourceRange: sourceRange, + contentLength: contentLength, + range: range, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); - } + }; /** * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a * page blob * @param options The options parameters. */ - getPageRanges(options) { - const operationArguments = { + PageBlob.prototype.getPageRanges = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); - } + }; /** * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were * changed between target blob and previous snapshot. * @param options The options parameters. 
*/ - getPageRangesDiff(options) { - const operationArguments = { + PageBlob.prototype.getPageRangesDiff = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); - } + }; /** * Resize the Blob * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The * page blob size must be aligned to a 512-byte boundary. * @param options The options parameters. */ - resize(blobContentLength, options) { - const operationArguments = { - blobContentLength, + PageBlob.prototype.resize = function (blobContentLength, options) { + var operationArguments = { + blobContentLength: blobContentLength, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); - } + }; /** * Update the sequence number of the blob * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. @@ -27718,13 +28781,13 @@ class PageBlob { * blob's sequence number * @param options The options parameters. */ - updateSequenceNumber(sequenceNumberAction, options) { - const operationArguments = { - sequenceNumberAction, + PageBlob.prototype.updateSequenceNumber = function (sequenceNumberAction, options) { + var operationArguments = { + sequenceNumberAction: sequenceNumberAction, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); - } + }; /** * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. * The snapshot is copied such that only the differential changes between the previously copied @@ -27737,18 +28800,19 @@ class PageBlob { * access signature. * @param options The options parameters. 
*/ - copyIncremental(copySource, options) { - const operationArguments = { - copySource, + PageBlob.prototype.copyIncremental = function (copySource, options) { + var operationArguments = { + copySource: copySource, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); - } -} + }; + return PageBlob; +}()); // Operation Specifications -const xmlSerializer$3 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); -const createOperationSpec$1 = { +var xmlSerializer$3 = new coreHttp.Serializer(Mappers, /* isXml */ true); +var serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +var createOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27793,7 +28857,7 @@ const createOperationSpec$1 = { isXML: true, serializer: xmlSerializer$3 }; -const uploadPagesOperationSpec = { +var uploadPagesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27833,9 +28897,9 @@ const uploadPagesOperationSpec = { ifSequenceNumberEqualTo ], mediaType: "binary", - serializer + serializer: serializer }; -const clearPagesOperationSpec = { +var clearPagesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27873,7 +28937,7 @@ const clearPagesOperationSpec = { isXML: true, serializer: xmlSerializer$3 }; -const uploadPagesFromURLOperationSpec = { +var uploadPagesFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27919,7 +28983,7 @@ const uploadPagesFromURLOperationSpec = { isXML: true, serializer: xmlSerializer$3 }; -const getPageRangesOperationSpec = { +var getPageRangesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -27953,7 +29017,7 @@ const getPageRangesOperationSpec = { isXML: true, serializer: xmlSerializer$3 }; -const getPageRangesDiffOperationSpec = { +var getPageRangesDiffOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -27989,7 +29053,7 @@ const getPageRangesDiffOperationSpec = { isXML: true, serializer: xmlSerializer$3 }; -const resizeOperationSpec = { +var resizeOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -28022,7 +29086,7 @@ const resizeOperationSpec = { isXML: true, serializer: xmlSerializer$3 }; -const updateSequenceNumberOperationSpec = { +var updateSequenceNumberOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -28052,7 +29116,7 @@ const updateSequenceNumberOperationSpec = { isXML: true, serializer: xmlSerializer$3 }; -const copyIncrementalOperationSpec = { +var copyIncrementalOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -28089,12 +29153,12 @@ const copyIncrementalOperationSpec = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class representing a AppendBlob. */ -class AppendBlob { +var AppendBlob = /** @class */ (function () { /** * Initialize a new instance of the class AppendBlob class. * @param client Reference to the service client */ - constructor(client) { + function AppendBlob(client) { this.client = client; } /** @@ -28102,13 +29166,13 @@ class AppendBlob { * @param contentLength The length of the request. * @param options The options parameters. 
*/ - create(contentLength, options) { - const operationArguments = { - contentLength, + AppendBlob.prototype.create = function (contentLength, options) { + var operationArguments = { + contentLength: contentLength, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); - } + }; /** * The Append Block operation commits a new block of data to the end of an existing append blob. The * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to @@ -28117,14 +29181,14 @@ class AppendBlob { * @param body Initial data * @param options The options parameters. */ - appendBlock(contentLength, body, options) { - const operationArguments = { - contentLength, - body, + AppendBlob.prototype.appendBlock = function (contentLength, body, options) { + var operationArguments = { + contentLength: contentLength, + body: body, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); - } + }; /** * The Append Block operation commits a new block of data to the end of an existing append blob where * the contents are read from a source url. The Append Block operation is permitted only if the blob @@ -28134,30 +29198,31 @@ class AppendBlob { * @param contentLength The length of the request. * @param options The options parameters. */ - appendBlockFromUrl(sourceUrl, contentLength, options) { - const operationArguments = { - sourceUrl, - contentLength, + AppendBlob.prototype.appendBlockFromUrl = function (sourceUrl, contentLength, options) { + var operationArguments = { + sourceUrl: sourceUrl, + contentLength: contentLength, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); - } + }; /** * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version * 2019-12-12 version or later. * @param options The options parameters. 
*/ - seal(options) { - const operationArguments = { + AppendBlob.prototype.seal = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, sealOperationSpec); - } -} + }; + return AppendBlob; +}()); // Operation Specifications -const xmlSerializer$4 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ false); -const createOperationSpec$2 = { +var xmlSerializer$4 = new coreHttp.Serializer(Mappers, /* isXml */ true); +var serializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ false); +var createOperationSpec$2 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -28199,7 +29264,7 @@ const createOperationSpec$2 = { isXML: true, serializer: xmlSerializer$4 }; -const appendBlockOperationSpec = { +var appendBlockOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -28238,7 +29303,7 @@ const appendBlockOperationSpec = { mediaType: "binary", serializer: serializer$1 }; -const appendBlockFromUrlOperationSpec = { +var appendBlockFromUrlOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -28282,7 +29347,7 @@ const appendBlockFromUrlOperationSpec = { isXML: true, serializer: xmlSerializer$4 }; -const sealOperationSpec = { +var sealOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -28319,12 +29384,12 @@ const sealOperationSpec = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class representing a BlockBlob. */ -class BlockBlob { +var BlockBlob = /** @class */ (function () { /** * Initialize a new instance of the class BlockBlob class. * @param client Reference to the service client */ - constructor(client) { + function BlockBlob(client) { this.client = client; } /** @@ -28336,14 +29401,14 @@ class BlockBlob { * @param body Initial data * @param options The options parameters. */ - upload(contentLength, body, options) { - const operationArguments = { - contentLength, - body, + BlockBlob.prototype.upload = function (contentLength, body, options) { + var operationArguments = { + contentLength: contentLength, + body: body, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); - } + }; /** * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are @@ -28357,14 +29422,14 @@ class BlockBlob { * access signature. * @param options The options parameters. */ - putBlobFromUrl(contentLength, copySource, options) { - const operationArguments = { - contentLength, - copySource, + BlockBlob.prototype.putBlobFromUrl = function (contentLength, copySource, options) { + var operationArguments = { + contentLength: contentLength, + copySource: copySource, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); - } + }; /** * The Stage Block operation creates a new block to be committed as part of a blob * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string @@ -28374,15 +29439,15 @@ class BlockBlob { * @param body Initial data * @param options The options parameters. 
*/ - stageBlock(blockId, contentLength, body, options) { - const operationArguments = { - blockId, - contentLength, - body, + BlockBlob.prototype.stageBlock = function (blockId, contentLength, body, options) { + var operationArguments = { + blockId: blockId, + contentLength: contentLength, + body: body, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); - } + }; /** * The Stage Block operation creates a new block to be committed as part of a blob where the contents * are read from a URL. @@ -28393,15 +29458,15 @@ class BlockBlob { * @param sourceUrl Specify a URL to the copy source. * @param options The options parameters. */ - stageBlockFromURL(blockId, contentLength, sourceUrl, options) { - const operationArguments = { - blockId, - contentLength, - sourceUrl, + BlockBlob.prototype.stageBlockFromURL = function (blockId, contentLength, sourceUrl, options) { + var operationArguments = { + blockId: blockId, + contentLength: contentLength, + sourceUrl: sourceUrl, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); - } + }; /** * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the * blob. In order to be written as part of a blob, a block must have been successfully written to the @@ -28413,13 +29478,13 @@ class BlockBlob { * @param blocks * @param options The options parameters. */ - commitBlockList(blocks, options) { - const operationArguments = { - blocks, + BlockBlob.prototype.commitBlockList = function (blocks, options) { + var operationArguments = { + blocks: blocks, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); - } + }; /** * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block * blob @@ -28427,18 +29492,19 @@ class BlockBlob { * blocks, or both lists together. * @param options The options parameters. 
*/ - getBlockList(listType, options) { - const operationArguments = { - listType, + BlockBlob.prototype.getBlockList = function (listType, options) { + var operationArguments = { + listType: listType, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); - } -} + }; + return BlockBlob; +}()); // Operation Specifications -const xmlSerializer$5 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ false); -const uploadOperationSpec = { +var xmlSerializer$5 = new coreHttp.Serializer(Mappers, /* isXml */ true); +var serializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ false); +var uploadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -28484,7 +29550,7 @@ const uploadOperationSpec = { mediaType: "binary", serializer: serializer$2 }; -const putBlobFromUrlOperationSpec = { +var putBlobFromUrlOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -28536,7 +29602,7 @@ const putBlobFromUrlOperationSpec = { isXML: true, serializer: xmlSerializer$5 }; -const stageBlockOperationSpec = { +var stageBlockOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -28572,7 +29638,7 @@ const stageBlockOperationSpec = { mediaType: "binary", serializer: serializer$2 }; -const stageBlockFromURLOperationSpec = { +var stageBlockFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -28612,7 +29678,7 @@ const stageBlockFromURLOperationSpec = { isXML: true, serializer: xmlSerializer$5 }; -const commitBlockListOperationSpec = { +var commitBlockListOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -28659,7 +29725,7 @@ const commitBlockListOperationSpec = { mediaType: "xml", serializer: xmlSerializer$5 }; -const getBlockListOperationSpec = { +var getBlockListOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -28694,23 +29760,23 @@ const getBlockListOperationSpec = { /** * The `@azure/logger` configuration for this package. */ -const logger = logger$1.createClientLogger("storage-blob"); +var logger = logger$1.createClientLogger("storage-blob"); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -const SDK_VERSION = "12.7.0"; -const SERVICE_VERSION = "2020-08-04"; -const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB -const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB -const BLOCK_BLOB_MAX_BLOCKS = 50000; -const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB -const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB -const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; +var SDK_VERSION = "12.6.0"; +var SERVICE_VERSION = "2020-08-04"; +var BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB +var BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB +var BLOCK_BLOB_MAX_BLOCKS = 50000; +var DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB +var DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB +var DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; /** * The OAuth scope to use with Azure Storage. 
*/ -const StorageOAuthScopes = "https://storage.azure.com/.default"; -const URLConstants = { +var StorageOAuthScopes = "https://storage.azure.com/.default"; +var URLConstants = { Parameters: { FORCE_BROWSER_NO_CACHE: "_", SIGNATURE: "sig", @@ -28719,14 +29785,14 @@ const URLConstants = { TIMEOUT: "timeout" } }; -const HTTPURLConnection = { +var HTTPURLConnection = { HTTP_ACCEPTED: 202, HTTP_CONFLICT: 409, HTTP_NOT_FOUND: 404, HTTP_PRECON_FAILED: 412, HTTP_RANGE_NOT_SATISFIABLE: 416 }; -const HeaderConstants = { +var HeaderConstants = { AUTHORIZATION: "Authorization", AUTHORIZATION_SCHEME: "Bearer", CONTENT_ENCODING: "Content-Encoding", @@ -28751,16 +29817,16 @@ const HeaderConstants = { X_MS_ERROR_CODE: "x-ms-error-code", X_MS_VERSION: "x-ms-version" }; -const ETagNone = ""; -const ETagAny = "*"; -const SIZE_1_MB = 1 * 1024 * 1024; -const BATCH_MAX_REQUEST = 256; -const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; -const HTTP_LINE_ENDING = "\r\n"; -const HTTP_VERSION_1_1 = "HTTP/1.1"; -const EncryptionAlgorithmAES25 = "AES256"; -const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; -const StorageBlobLoggingAllowedHeaderNames = [ +var ETagNone = ""; +var ETagAny = "*"; +var SIZE_1_MB = 1 * 1024 * 1024; +var BATCH_MAX_REQUEST = 256; +var BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; +var HTTP_LINE_ENDING = "\r\n"; +var HTTP_VERSION_1_1 = "HTTP/1.1"; +var EncryptionAlgorithmAES25 = "AES256"; +var DevelopmentConnectionString = "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;"; +var StorageBlobLoggingAllowedHeaderNames = [ "Access-Control-Allow-Origin", "Cache-Control", "Content-Length", @@ -28856,7 +29922,7 @@ const StorageBlobLoggingAllowedHeaderNames = [ "x-ms-if-tags", "x-ms-source-if-tags" ]; -const StorageBlobLoggingAllowedQueryParameters = [ +var StorageBlobLoggingAllowedQueryParameters = [ "comp", "maxresults", "rscc", @@ -28946,8 +30012,8 @@ const StorageBlobLoggingAllowedQueryParameters = [ * @param url - */ function escapeURLPath(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let path = urlParsed.getPath(); + var urlParsed = coreHttp.URLBuilder.parse(url); + var path = urlParsed.getPath(); path = path || "/"; path = escape(path); urlParsed.setPath(path); @@ -28956,11 +30022,12 @@ function escapeURLPath(url) { function getProxyUriFromDevConnString(connectionString) { // Development Connection String // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key - let proxyUri = ""; + var proxyUri = ""; if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri - const matchCredentials = connectionString.split(";"); - for (const element of matchCredentials) { + var matchCredentials = connectionString.split(";"); + for (var _i = 0, matchCredentials_1 = matchCredentials; _i < matchCredentials_1.length; _i++) { + var element = matchCredentials_1[_i]; if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; } @@ -28969,8 +30036,9 @@ function 
getProxyUriFromDevConnString(connectionString) { return proxyUri; } function getValueInConnString(connectionString, argument) { - const elements = connectionString.split(";"); - for (const element of elements) { + var elements = connectionString.split(";"); + for (var _i = 0, elements_1 = elements; _i < elements_1.length; _i++) { + var element = elements_1[_i]; if (element.trim().startsWith(argument)) { return element.trim().match(argument + "=(.*)")[1]; } @@ -28984,24 +30052,24 @@ function getValueInConnString(connectionString, argument) { * @returns String key value pairs of the storage account's url and credentials. */ function extractConnectionStringParts(connectionString) { - let proxyUri = ""; + var proxyUri = ""; if (connectionString.startsWith("UseDevelopmentStorage=true")) { // Development connection string proxyUri = getProxyUriFromDevConnString(connectionString); connectionString = DevelopmentConnectionString; } // Matching BlobEndpoint in the Account connection string - let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + var blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); // Slicing off '/' at the end if exists // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && connectionString.search("AccountKey=") !== -1) { // Account connection string - let defaultEndpointsProtocol = ""; - let accountName = ""; - let accountKey = Buffer.from("accountKey", "base64"); - let endpointSuffix = ""; + var defaultEndpointsProtocol = ""; + var accountName = ""; + var accountKey = Buffer.from("accountKey", "base64"); + var endpointSuffix = ""; // Get account name and key accountName = getValueInConnString(connectionString, "AccountName"); accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); @@ -29009,7 +30077,7 @@ function extractConnectionStringParts(connectionString) { // BlobEndpoint is not present in the Account connection string // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); - const protocol = defaultEndpointsProtocol.toLowerCase(); + var protocol = defaultEndpointsProtocol.toLowerCase(); if (protocol !== "https" && protocol !== "http") { throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); } @@ -29017,7 +30085,7 @@ function extractConnectionStringParts(connectionString) { if (!endpointSuffix) { throw new Error("Invalid EndpointSuffix in the provided Connection String"); } - blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + blobEndpoint = defaultEndpointsProtocol + "://" + accountName + ".blob." 
+ endpointSuffix; } if (!accountName) { throw new Error("Invalid AccountName in the provided Connection String"); @@ -29028,22 +30096,22 @@ function extractConnectionStringParts(connectionString) { return { kind: "AccountConnString", url: blobEndpoint, - accountName, - accountKey, - proxyUri + accountName: accountName, + accountKey: accountKey, + proxyUri: proxyUri }; } else { // SAS connection string - const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); - const accountName = getAccountNameFromUrl(blobEndpoint); + var accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + var accountName = getAccountNameFromUrl(blobEndpoint); if (!blobEndpoint) { throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); } else if (!accountSas) { throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); } - return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + return { kind: "SASConnString", url: blobEndpoint, accountName: accountName, accountSas: accountSas }; } } /** @@ -29067,9 +30135,9 @@ function escape(text) { * @returns An updated URL string */ function appendToURLPath(url, name) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let path = urlParsed.getPath(); - path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name; + var urlParsed = coreHttp.URLBuilder.parse(url); + var path = urlParsed.getPath(); + path = path ? (path.endsWith("/") ? "" + path + name : path + "/" + name) : name; urlParsed.setPath(path); return urlParsed.toString(); } @@ -29083,7 +30151,7 @@ function appendToURLPath(url, name) { * @returns An updated URL string */ function setURLParameter(url, name, value) { - const urlParsed = coreHttp.URLBuilder.parse(url); + var urlParsed = coreHttp.URLBuilder.parse(url); urlParsed.setQueryParameter(name, value); return urlParsed.toString(); } @@ -29094,7 +30162,7 @@ function setURLParameter(url, name, value) { * @param name - */ function getURLParameter(url, name) { - const urlParsed = coreHttp.URLBuilder.parse(url); + var urlParsed = coreHttp.URLBuilder.parse(url); return urlParsed.getQueryParameterValue(name); } /** @@ -29105,7 +30173,7 @@ function getURLParameter(url, name) { * @returns An updated URL string */ function setURLHost(url, host) { - const urlParsed = coreHttp.URLBuilder.parse(url); + var urlParsed = coreHttp.URLBuilder.parse(url); urlParsed.setHost(host); return urlParsed.toString(); } @@ -29115,7 +30183,7 @@ function setURLHost(url, host) { * @param url - Source URL string */ function getURLPath(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); + var urlParsed = coreHttp.URLBuilder.parse(url); return urlParsed.getPath(); } /** @@ -29124,7 +30192,7 @@ function getURLPath(url) { * @param url - Source URL string */ function getURLScheme(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); + var urlParsed = coreHttp.URLBuilder.parse(url); return urlParsed.getScheme(); } /** @@ -29133,17 +30201,17 @@ function getURLScheme(url) { * @param url - Source URL string */ function getURLPathAndQuery(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - const pathString = urlParsed.getPath(); + var urlParsed = coreHttp.URLBuilder.parse(url); + var pathString = urlParsed.getPath(); if (!pathString) { throw new RangeError("Invalid url without valid path."); } - let queryString = urlParsed.getQuery() || ""; + var queryString = urlParsed.getQuery() || ""; queryString = queryString.trim(); if (queryString != "") 
{ - queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + queryString = queryString.startsWith("?") ? queryString : "?" + queryString; // Ensure query string start with '?' } - return `${pathString}${queryString}`; + return "" + pathString + queryString; } /** * Get URL query key value pairs from an URL string. @@ -29151,23 +30219,24 @@ function getURLPathAndQuery(url) { * @param url - */ function getURLQueries(url) { - let queryString = coreHttp.URLBuilder.parse(url).getQuery(); + var queryString = coreHttp.URLBuilder.parse(url).getQuery(); if (!queryString) { return {}; } queryString = queryString.trim(); queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString; - let querySubStrings = queryString.split("&"); - querySubStrings = querySubStrings.filter((value) => { - const indexOfEqual = value.indexOf("="); - const lastIndexOfEqual = value.lastIndexOf("="); + var querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter(function (value) { + var indexOfEqual = value.indexOf("="); + var lastIndexOfEqual = value.lastIndexOf("="); return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); }); - const queries = {}; - for (const querySubString of querySubStrings) { - const splitResults = querySubString.split("="); - const key = splitResults[0]; - const value = splitResults[1]; + var queries = {}; + for (var _i = 0, querySubStrings_1 = querySubStrings; _i < querySubStrings_1.length; _i++) { + var querySubString = querySubStrings_1[_i]; + var splitResults = querySubString.split("="); + var key = splitResults[0]; + var value = splitResults[1]; queries[key] = value; } return queries; @@ -29180,8 +30249,8 @@ function getURLQueries(url) { * @returns An updated URL string. */ function appendToURLQuery(url, queryParts) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let query = urlParsed.getQuery(); + var urlParsed = coreHttp.URLBuilder.parse(url); + var query = urlParsed.getQuery(); if (query) { query += "&" + queryParts; } @@ -29199,9 +30268,10 @@ function appendToURLQuery(url, queryParts) { * If false, YYYY-MM-DDThh:mm:ssZ will be returned. * @returns Date string in ISO8061 format, with or without 7 milliseconds component */ -function truncatedISO8061Date(date, withMilliseconds = true) { +function truncatedISO8061Date(date, withMilliseconds) { + if (withMilliseconds === void 0) { withMilliseconds = true; } // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" - const dateString = date.toISOString(); + var dateString = date.toISOString(); return withMilliseconds ? 
dateString.substring(0, dateString.length - 1) + "0000" + "Z" : dateString.substring(0, dateString.length - 5) + "Z"; @@ -29221,14 +30291,14 @@ function base64encode(content) { */ function generateBlockID(blockIDPrefix, blockIndex) { // To generate a 64 bytes base64 string, source string should be 48 - const maxSourceStringLength = 48; + var maxSourceStringLength = 48; // A blob can have a maximum of 100,000 uncommitted blocks at any given time - const maxBlockIndexLength = 6; - const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + var maxBlockIndexLength = 6; + var maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); } - const res = blockIDPrefix + + var res = blockIDPrefix + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); return base64encode(res); } @@ -29239,25 +30309,29 @@ function generateBlockID(blockIDPrefix, blockIndex) { * @param aborter - * @param abortError - */ -async function delay(timeInMs, aborter, abortError) { - return new Promise((resolve, reject) => { - let timeout; - const abortHandler = () => { - if (timeout !== undefined) { - clearTimeout(timeout); - } - reject(abortError); - }; - const resolveHandler = () => { - if (aborter !== undefined) { - aborter.removeEventListener("abort", abortHandler); - } - resolve(); - }; - timeout = setTimeout(resolveHandler, timeInMs); - if (aborter !== undefined) { - aborter.addEventListener("abort", abortHandler); - } +function delay(timeInMs, aborter, abortError) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, new Promise(function (resolve, reject) { + var timeout; + var abortHandler = function () { + if (timeout !== undefined) { + clearTimeout(timeout); + } + reject(abortError); + }; + var resolveHandler = function () { + if (aborter !== undefined) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== undefined) { + aborter.addEventListener("abort", abortHandler); + } + })]; + }); }); } /** @@ -29267,7 +30341,8 @@ async function delay(timeInMs, aborter, abortError) { * @param targetLength - * @param padString - */ -function padStart(currentString, targetLength, padString = " ") { +function padStart(currentString, targetLength, padString) { + if (padString === void 0) { padString = " "; } // TS doesn't know this code needs to run downlevel sometimes. // @ts-expect-error if (String.prototype.padStart) { @@ -29300,8 +30375,8 @@ function iEqual(str1, str2) { * @returns with the account name */ function getAccountNameFromUrl(url) { - const parsedUrl = coreHttp.URLBuilder.parse(url); - let accountName; + var parsedUrl = coreHttp.URLBuilder.parse(url); + var accountName; try { if (parsedUrl.getHost().split(".")[1] === "blob") { // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; @@ -29327,7 +30402,7 @@ function isIpEndpointStyle(parsedUrl) { if (parsedUrl.getHost() == undefined) { return false; } - const host = parsedUrl.getHost() + (parsedUrl.getPort() == undefined ? "" : ":" + parsedUrl.getPort()); + var host = parsedUrl.getHost() + (parsedUrl.getPort() == undefined ? "" : ":" + parsedUrl.getPort()); // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. 
// Case 2: localhost(:port), use broad regex to match port part. // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. @@ -29343,11 +30418,11 @@ function toBlobTagsString(tags) { if (tags === undefined) { return undefined; } - const tagPairs = []; - for (const key in tags) { + var tagPairs = []; + for (var key in tags) { if (tags.hasOwnProperty(key)) { - const value = tags[key]; - tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + var value = tags[key]; + tagPairs.push(encodeURIComponent(key) + "=" + encodeURIComponent(value)); } } return tagPairs.join("&"); @@ -29361,15 +30436,15 @@ function toBlobTags(tags) { if (tags === undefined) { return undefined; } - const res = { + var res = { blobTagSet: [] }; - for (const key in tags) { + for (var key in tags) { if (tags.hasOwnProperty(key)) { - const value = tags[key]; + var value = tags[key]; res.blobTagSet.push({ - key, - value + key: key, + value: value }); } } @@ -29384,8 +30459,9 @@ function toTags(tags) { if (tags === undefined) { return undefined; } - const res = {}; - for (const blobTag of tags.blobTagSet) { + var res = {}; + for (var _i = 0, _a = tags.blobTagSet; _i < _a.length; _i++) { + var blobTag = _a[_i]; res[blobTag.key] = blobTag.value; } return res; @@ -29444,18 +30520,18 @@ function parseObjectReplicationRecord(objectReplicationRecord) { // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. return undefined; } - const orProperties = []; - for (const key in objectReplicationRecord) { - const ids = key.split("_"); - const policyPrefix = "or-"; + var orProperties = []; + var _loop_1 = function (key) { + var ids = key.split("_"); + var policyPrefix = "or-"; if (ids[0].startsWith(policyPrefix)) { ids[0] = ids[0].substring(policyPrefix.length); } - const rule = { + var rule = { ruleId: ids[1], replicationStatus: objectReplicationRecord[key] }; - const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + var policyIndex = orProperties.findIndex(function (policy) { return policy.policyId === ids[0]; }); if (policyIndex > -1) { orProperties[policyIndex].rules.push(rule); } @@ -29465,6 +30541,9 @@ function parseObjectReplicationRecord(objectReplicationRecord) { rules: [rule] }); } + }; + for (var key in objectReplicationRecord) { + _loop_1(key); } return orProperties; } @@ -29491,42 +30570,51 @@ function attachCredential(thing, credential) { * * 3. Remove content-length header to avoid browsers warning */ -class StorageBrowserPolicy extends coreHttp.BaseRequestPolicy { +var StorageBrowserPolicy = /** @class */ (function (_super) { + tslib.__extends(StorageBrowserPolicy, _super); /** * Creates an instance of StorageBrowserPolicy. * @param nextPolicy - * @param options - */ - constructor(nextPolicy, options) { - super(nextPolicy, options); + function StorageBrowserPolicy(nextPolicy, options) { + return _super.call(this, nextPolicy, options) || this; } /** * Sends out request. * * @param request - */ - async sendRequest(request) { - { - return this._nextPolicy.sendRequest(request); - } - } -} + StorageBrowserPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + { + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + } + }); + }); + }; + return StorageBrowserPolicy; +}(coreHttp.BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. 
/** * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. */ -class StorageBrowserPolicyFactory { +var StorageBrowserPolicyFactory = /** @class */ (function () { + function StorageBrowserPolicyFactory() { + } /** * Creates a StorageBrowserPolicyFactory object. * * @param nextPolicy - * @param options - */ - create(nextPolicy, options) { + StorageBrowserPolicyFactory.prototype.create = function (nextPolicy, options) { return new StorageBrowserPolicy(nextPolicy, options); - } -} + }; + return StorageBrowserPolicyFactory; +}()); // Copyright (c) Microsoft Corporation. (function (StorageRetryPolicyType) { @@ -29540,7 +30628,7 @@ class StorageBrowserPolicyFactory { StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; })(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); // Default values of StorageRetryOptions -const DEFAULT_RETRY_OPTIONS = { +var DEFAULT_RETRY_OPTIONS = { maxRetryDelayInMs: 120 * 1000, maxTries: 4, retryDelayInMs: 4 * 1000, @@ -29548,11 +30636,12 @@ const DEFAULT_RETRY_OPTIONS = { secondaryHost: "", tryTimeoutInMs: undefined // Use server side default timeout strategy }; -const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); +var RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); /** * Retry policy with exponential retry and linear retry implemented. */ -class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { +var StorageRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(StorageRetryPolicy, _super); /** * Creates an instance of RetryPolicy. * @@ -29560,10 +30649,11 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { * @param options - * @param retryOptions - */ - constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { - super(nextPolicy, options); + function StorageRetryPolicy(nextPolicy, options, retryOptions) { + if (retryOptions === void 0) { retryOptions = DEFAULT_RETRY_OPTIONS; } + var _this = _super.call(this, nextPolicy, options) || this; // Initialize retry options - this.retryOptions = { + _this.retryOptions = { retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS.retryPolicyType, @@ -29585,15 +30675,20 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS.secondaryHost }; + return _this; } /** * Sends request. * * @param request - */ - async sendRequest(request) { - return this.attemptSendRequest(request, false, 1); - } + StorageRetryPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, this.attemptSendRequest(request, false, 1)]; + }); + }); + }; /** * Decide and perform next retry. Won't mutate request parameter. * @@ -29604,37 +30699,52 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { * @param attempt - How many retries has been attempted to performed, starting from 1, which includes * the attempt will be performed by this method call. 
*/ - async attemptSendRequest(request, secondaryHas404, attempt) { - const newRequest = request.clone(); - const isPrimaryRetry = secondaryHas404 || - !this.retryOptions.secondaryHost || - !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || - attempt % 2 === 1; - if (!isPrimaryRetry) { - newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); - } - // Set the server-side timeout query parameter "timeout=[seconds]" - if (this.retryOptions.tryTimeoutInMs) { - newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); - } - let response; - try { - logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); - response = await this._nextPolicy.sendRequest(newRequest); - if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { - return response; - } - secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); - } - catch (err) { - logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); - if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { - throw err; - } - } - await this.delay(isPrimaryRetry, attempt, request.abortSignal); - return await this.attemptSendRequest(request, secondaryHas404, ++attempt); - } + StorageRetryPolicy.prototype.attemptSendRequest = function (request, secondaryHas404, attempt) { + return tslib.__awaiter(this, void 0, void 0, function () { + var newRequest, isPrimaryRetry, response, err_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + newRequest = request.clone(); + isPrimaryRetry = secondaryHas404 || + !this.retryOptions.secondaryHost || + !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || + attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); + } + // Set the server-side timeout query parameter "timeout=[seconds]" + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); + } + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + logger.info("RetryPolicy: =====> Try=" + attempt + " " + (isPrimaryRetry ? "Primary" : "Secondary")); + return [4 /*yield*/, this._nextPolicy.sendRequest(newRequest)]; + case 2: + response = _a.sent(); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return [2 /*return*/, response]; + } + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + return [3 /*break*/, 4]; + case 3: + err_1 = _a.sent(); + logger.error("RetryPolicy: Caught error, message: " + err_1.message + ", code: " + err_1.code); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err_1)) { + throw err_1; + } + return [3 /*break*/, 4]; + case 4: return [4 /*yield*/, this.delay(isPrimaryRetry, attempt, request.abortSignal)]; + case 5: + _a.sent(); + return [4 /*yield*/, this.attemptSendRequest(request, secondaryHas404, ++attempt)]; + case 6: return [2 /*return*/, _a.sent()]; + } + }); + }); + }; /** * Decide whether to retry according to last HTTP response and retry counters. 
* @@ -29643,15 +30753,15 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { * @param response - * @param err - */ - shouldRetry(isPrimaryRetry, attempt, response, err) { + StorageRetryPolicy.prototype.shouldRetry = function (isPrimaryRetry, attempt, response, err) { if (attempt >= this.retryOptions.maxTries) { - logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions - .maxTries}, no further try.`); + logger.info("RetryPolicy: Attempt(s) " + attempt + " >= maxTries " + this.retryOptions + .maxTries + ", no further try."); return false; } // Handle network failures, you may need to customize the list when you implement // your own http client - const retriableErrors = [ + var retriableErrors = [ "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", @@ -29663,11 +30773,12 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { "REQUEST_SEND_ERROR" // For default xhr based http client provided in ms-rest-js ]; if (err) { - for (const retriableError of retriableErrors) { + for (var _i = 0, retriableErrors_1 = retriableErrors; _i < retriableErrors_1.length; _i++) { + var retriableError = retriableErrors_1[_i]; if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || (err.code && err.code.toString().toUpperCase() === retriableError)) { - logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + logger.info("RetryPolicy: Network error " + retriableError + " found, will retry."); return true; } } @@ -29676,23 +30787,23 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { // the resource was not found. This may be due to replication delay. So, in this // case, we'll never try the secondary again for this operation. if (response || err) { - const statusCode = response ? response.status : err ? err.statusCode : 0; + var statusCode = response ? response.status : err ? err.statusCode : 0; if (!isPrimaryRetry && statusCode === 404) { - logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + logger.info("RetryPolicy: Secondary access with 404, will retry."); return true; } // Server internal error or server timeout if (statusCode === 503 || statusCode === 500) { - logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + logger.info("RetryPolicy: Will retry for status code " + statusCode + "."); return true; } } - if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith("Error \"Error: Unclosed root tag"))) { logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); return true; } return false; - } + }; /** * Delay a calculated time between retries. 
* @@ -29700,36 +30811,42 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { * @param attempt - * @param abortSignal - */ - async delay(isPrimaryRetry, attempt, abortSignal) { - let delayTimeInMs = 0; - if (isPrimaryRetry) { - switch (this.retryOptions.retryPolicyType) { - case exports.StorageRetryPolicyType.EXPONENTIAL: - delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); - break; - case exports.StorageRetryPolicyType.FIXED: - delayTimeInMs = this.retryOptions.retryDelayInMs; - break; - } - } - else { - delayTimeInMs = Math.random() * 1000; - } - logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); - return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); - } -} + StorageRetryPolicy.prototype.delay = function (isPrimaryRetry, attempt, abortSignal) { + return tslib.__awaiter(this, void 0, void 0, function () { + var delayTimeInMs; + return tslib.__generator(this, function (_a) { + delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case exports.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case exports.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info("RetryPolicy: Delay for " + delayTimeInMs + "ms"); + return [2 /*return*/, delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR)]; + }); + }); + }; + return StorageRetryPolicy; +}(coreHttp.BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. /** * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. */ -class StorageRetryPolicyFactory { +var StorageRetryPolicyFactory = /** @class */ (function () { /** * Creates an instance of StorageRetryPolicyFactory. * @param retryOptions - */ - constructor(retryOptions) { + function StorageRetryPolicyFactory(retryOptions) { this.retryOptions = retryOptions; } /** @@ -29738,53 +30855,61 @@ class StorageRetryPolicyFactory { * @param nextPolicy - * @param options - */ - create(nextPolicy, options) { + StorageRetryPolicyFactory.prototype.create = function (nextPolicy, options) { return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); - } -} + }; + return StorageRetryPolicyFactory; +}()); // Copyright (c) Microsoft Corporation. /** * Credential policy used to sign HTTP(S) requests before sending. This is an * abstract class. */ -class CredentialPolicy extends coreHttp.BaseRequestPolicy { +var CredentialPolicy = /** @class */ (function (_super) { + tslib.__extends(CredentialPolicy, _super); + function CredentialPolicy() { + return _super !== null && _super.apply(this, arguments) || this; + } /** * Sends out request. * * @param request - */ - sendRequest(request) { + CredentialPolicy.prototype.sendRequest = function (request) { return this._nextPolicy.sendRequest(this.signRequest(request)); - } + }; /** * Child classes must implement this method with request signing. This method * will be executed in {@link sendRequest}. * * @param request - */ - signRequest(request) { + CredentialPolicy.prototype.signRequest = function (request) { // Child classes must override this method with request signing. This method // will be executed in sendRequest(). 
return request; - } -} + }; + return CredentialPolicy; +}(coreHttp.BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. /** * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources * or for use with Shared Access Signatures (SAS). */ -class AnonymousCredentialPolicy extends CredentialPolicy { +var AnonymousCredentialPolicy = /** @class */ (function (_super) { + tslib.__extends(AnonymousCredentialPolicy, _super); /** * Creates an instance of AnonymousCredentialPolicy. * @param nextPolicy - * @param options - */ - constructor(nextPolicy, options) { - super(nextPolicy, options); + function AnonymousCredentialPolicy(nextPolicy, options) { + return _super.call(this, nextPolicy, options) || this; } -} + return AnonymousCredentialPolicy; +}(CredentialPolicy)); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -29792,21 +30917,24 @@ class AnonymousCredentialPolicy extends CredentialPolicy { * Credential is an abstract class for Azure Storage HTTP requests signing. This * class will host an credentialPolicyCreator factory which generates CredentialPolicy. */ -class Credential { +var Credential = /** @class */ (function () { + function Credential() { + } /** * Creates a RequestPolicy object. * * @param _nextPolicy - * @param _options - */ - create( + Credential.prototype.create = function ( // tslint:disable-next-line:variable-name _nextPolicy, // tslint:disable-next-line:variable-name _options) { throw new Error("Method should be implemented in children classes."); - } -} + }; + return Credential; +}()); // Copyright (c) Microsoft Corporation. /** @@ -29815,76 +30943,88 @@ class Credential { * HTTP(S) requests that read public resources or for use with Shared Access * Signatures (SAS). */ -class AnonymousCredential extends Credential { +var AnonymousCredential = /** @class */ (function (_super) { + tslib.__extends(AnonymousCredential, _super); + function AnonymousCredential() { + return _super !== null && _super.apply(this, arguments) || this; + } /** * Creates an {@link AnonymousCredentialPolicy} object. * * @param nextPolicy - * @param options - */ - create(nextPolicy, options) { + AnonymousCredential.prototype.create = function (nextPolicy, options) { return new AnonymousCredentialPolicy(nextPolicy, options); - } -} + }; + return AnonymousCredential; +}(Credential)); // Copyright (c) Microsoft Corporation. /** * TelemetryPolicy is a policy used to tag user-agent header for every requests. */ -class TelemetryPolicy extends coreHttp.BaseRequestPolicy { +var TelemetryPolicy = /** @class */ (function (_super) { + tslib.__extends(TelemetryPolicy, _super); /** * Creates an instance of TelemetryPolicy. * @param nextPolicy - * @param options - * @param telemetry - */ - constructor(nextPolicy, options, telemetry) { - super(nextPolicy, options); - this.telemetry = telemetry; + function TelemetryPolicy(nextPolicy, options, telemetry) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.telemetry = telemetry; + return _this; } /** * Sends out request. 
* * @param request - */ - async sendRequest(request) { - { - if (!request.headers) { - request.headers = new coreHttp.HttpHeaders(); - } - if (!request.headers.get(HeaderConstants.USER_AGENT)) { - request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); - } - } - return this._nextPolicy.sendRequest(request); - } -} + TelemetryPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + { + if (!request.headers) { + request.headers = new coreHttp.HttpHeaders(); + } + if (!request.headers.get(HeaderConstants.USER_AGENT)) { + request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); + } + } + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + }); + }); + }; + return TelemetryPolicy; +}(coreHttp.BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. /** * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects. */ -class TelemetryPolicyFactory { +var TelemetryPolicyFactory = /** @class */ (function () { /** * Creates an instance of TelemetryPolicyFactory. * @param telemetry - */ - constructor(telemetry) { - const userAgentInfo = []; + function TelemetryPolicyFactory(telemetry) { + var userAgentInfo = []; { if (telemetry) { - const telemetryString = telemetry.userAgentPrefix || ""; + var telemetryString = telemetry.userAgentPrefix || ""; if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { userAgentInfo.push(telemetryString); } } // e.g. azsdk-js-storageblob/10.0.0 - const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; + var libInfo = "azsdk-js-storageblob/" + SDK_VERSION; if (userAgentInfo.indexOf(libInfo) === -1) { userAgentInfo.push(libInfo); } // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) - const runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`; + var runtimeInfo = "(NODE-VERSION " + process.version + "; " + os.type() + " " + os.release() + ")"; if (userAgentInfo.indexOf(runtimeInfo) === -1) { userAgentInfo.push(runtimeInfo); } @@ -29897,13 +31037,14 @@ class TelemetryPolicyFactory { * @param nextPolicy - * @param options - */ - create(nextPolicy, options) { + TelemetryPolicyFactory.prototype.create = function (nextPolicy, options) { return new TelemetryPolicy(nextPolicy, options, this.telemetryString); - } -} + }; + return TelemetryPolicyFactory; +}()); // Copyright (c) Microsoft Corporation. -const _defaultHttpClient = new coreHttp.DefaultHttpClient(); +var _defaultHttpClient = new coreHttp.DefaultHttpClient(); function getCachedDefaultHttpClient() { return _defaultHttpClient; } @@ -29917,18 +31058,19 @@ function getCachedDefaultHttpClient() { * Refer to {@link newPipeline} and provided policies before implementing your * customized Pipeline. */ -class Pipeline { +var Pipeline = /** @class */ (function () { /** * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. * * @param factories - * @param options - */ - constructor(factories, options = {}) { + function Pipeline(factories, options) { + if (options === void 0) { options = {}; } this.factories = factories; // when options.httpClient is not specified, passing in a DefaultHttpClient instance to // avoid each client creating its own http client. 
- this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); + this.options = tslib.__assign(tslib.__assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); } /** * Transfer Pipeline object to ServiceClientOptions object which is required by @@ -29936,13 +31078,14 @@ class Pipeline { * * @returns The ServiceClientOptions object from this Pipeline. */ - toServiceClientOptions() { + Pipeline.prototype.toServiceClientOptions = function () { return { httpClient: this.options.httpClient, requestPolicyFactories: this.factories }; - } -} + }; + return Pipeline; +}()); /** * Creates a new Pipeline object with Credential provided. * @@ -29950,15 +31093,16 @@ class Pipeline { * @param pipelineOptions - Optional. Options. * @returns A new Pipeline object. */ -function newPipeline(credential, pipelineOptions = {}) { +function newPipeline(credential, pipelineOptions) { + if (pipelineOptions === void 0) { pipelineOptions = {}; } if (credential === undefined) { credential = new AnonymousCredential(); } // Order is important. Closer to the API at the top & closer to the network at the bottom. // The credential's policy factory must appear close to the wire so it can sign any // changes made by other factories (like UniqueRequestIDPolicyFactory) - const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); - const factories = [ + var telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); + var factories = [ coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions), telemetryPolicy, @@ -29990,28 +31134,30 @@ function newPipeline(credential, pipelineOptions = {}) { /** * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. */ -class StorageSharedKeyCredentialPolicy extends CredentialPolicy { +var StorageSharedKeyCredentialPolicy = /** @class */ (function (_super) { + tslib.__extends(StorageSharedKeyCredentialPolicy, _super); /** * Creates an instance of StorageSharedKeyCredentialPolicy. * @param nextPolicy - * @param options - * @param factory - */ - constructor(nextPolicy, options, factory) { - super(nextPolicy, options); - this.factory = factory; + function StorageSharedKeyCredentialPolicy(nextPolicy, options, factory) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.factory = factory; + return _this; } /** * Signs request. 
* * @param request - */ - signRequest(request) { + StorageSharedKeyCredentialPolicy.prototype.signRequest = function (request) { request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); if (request.body && typeof request.body === "string" && request.body.length > 0) { request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } - const stringToSign = [ + var stringToSign = [ request.method.toUpperCase(), this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), @@ -30028,14 +31174,14 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { "\n" + this.getCanonicalizedHeadersString(request) + this.getCanonicalizedResourceString(request); - const signature = this.factory.computeHMACSHA256(stringToSign); - request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + var signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, "SharedKey " + this.factory.accountName + ":" + signature); // console.log(`[URL]:${request.url}`); // console.log(`[HEADERS]:${request.headers.toString()}`); // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); return request; - } + }; /** * Retrieve header value according to shared key sign rules. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key @@ -30043,8 +31189,8 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { * @param request - * @param headerName - */ - getHeaderValueToSign(request, headerName) { - const value = request.headers.get(headerName); + StorageSharedKeyCredentialPolicy.prototype.getHeaderValueToSign = function (request, headerName) { + var value = request.headers.get(headerName); if (!value) { return ""; } @@ -30055,7 +31201,7 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { return ""; } return value; - } + }; /** * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. 
@@ -30069,56 +31215,58 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { * * @param request - */ - getCanonicalizedHeadersString(request) { - let headersArray = request.headers.headersArray().filter((value) => { + StorageSharedKeyCredentialPolicy.prototype.getCanonicalizedHeadersString = function (request) { + var headersArray = request.headers.headersArray().filter(function (value) { return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); }); - headersArray.sort((a, b) => { + headersArray.sort(function (a, b) { return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); }); // Remove duplicate headers - headersArray = headersArray.filter((value, index, array) => { + headersArray = headersArray.filter(function (value, index, array) { if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { return false; } return true; }); - let canonicalizedHeadersStringToSign = ""; - headersArray.forEach((header) => { - canonicalizedHeadersStringToSign += `${header.name + var canonicalizedHeadersStringToSign = ""; + headersArray.forEach(function (header) { + canonicalizedHeadersStringToSign += header.name .toLowerCase() - .trimRight()}:${header.value.trimLeft()}\n`; + .trimRight() + ":" + header.value.trimLeft() + "\n"; }); return canonicalizedHeadersStringToSign; - } + }; /** * Retrieves the webResource canonicalized resource string. * * @param request - */ - getCanonicalizedResourceString(request) { - const path = getURLPath(request.url) || "/"; - let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path}`; - const queries = getURLQueries(request.url); - const lowercaseQueries = {}; + StorageSharedKeyCredentialPolicy.prototype.getCanonicalizedResourceString = function (request) { + var path = getURLPath(request.url) || "/"; + var canonicalizedResourceString = ""; + canonicalizedResourceString += "/" + this.factory.accountName + path; + var queries = getURLQueries(request.url); + var lowercaseQueries = {}; if (queries) { - const queryKeys = []; - for (const key in queries) { + var queryKeys = []; + for (var key in queries) { if (queries.hasOwnProperty(key)) { - const lowercaseKey = key.toLowerCase(); + var lowercaseKey = key.toLowerCase(); lowercaseQueries[lowercaseKey] = queries[key]; queryKeys.push(lowercaseKey); } } queryKeys.sort(); - for (const key of queryKeys) { - canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + for (var _i = 0, queryKeys_1 = queryKeys; _i < queryKeys_1.length; _i++) { + var key = queryKeys_1[_i]; + canonicalizedResourceString += "\n" + key + ":" + decodeURIComponent(lowercaseQueries[key]); } } return canonicalizedResourceString; - } -} + }; + return StorageSharedKeyCredentialPolicy; +}(CredentialPolicy)); // Copyright (c) Microsoft Corporation. /** @@ -30126,16 +31274,18 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { * * StorageSharedKeyCredential for account key authorization of Azure Storage service. */ -class StorageSharedKeyCredential extends Credential { +var StorageSharedKeyCredential = /** @class */ (function (_super) { + tslib.__extends(StorageSharedKeyCredential, _super); /** * Creates an instance of StorageSharedKeyCredential. 
* @param accountName - * @param accountKey - */ - constructor(accountName, accountKey) { - super(); - this.accountName = accountName; - this.accountKey = Buffer.from(accountKey, "base64"); + function StorageSharedKeyCredential(accountName, accountKey) { + var _this = _super.call(this) || this; + _this.accountName = accountName; + _this.accountKey = Buffer.from(accountKey, "base64"); + return _this; } /** * Creates a StorageSharedKeyCredentialPolicy object. @@ -30143,20 +31293,21 @@ class StorageSharedKeyCredential extends Credential { * @param nextPolicy - * @param options - */ - create(nextPolicy, options) { + StorageSharedKeyCredential.prototype.create = function (nextPolicy, options) { return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); - } + }; /** * Generates a hash signature for an HTTP request or for a SAS. * * @param stringToSign - */ - computeHMACSHA256(stringToSign) { + StorageSharedKeyCredential.prototype.computeHMACSHA256 = function (stringToSign) { return crypto.createHmac("sha256", this.accountKey) .update(stringToSign, "utf8") .digest("base64"); - } -} + }; + return StorageSharedKeyCredential; +}(Credential)); /* * Copyright (c) Microsoft Corporation. @@ -30165,16 +31316,18 @@ class StorageSharedKeyCredential extends Credential { * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -const packageName = "azure-storage-blob"; -const packageVersion = "12.6.0-beta.1"; -class StorageClientContext extends coreHttp.ServiceClient { +var packageName = "azure-storage-blob"; +var packageVersion = "12.6.0-beta.1"; +var StorageClientContext = /** @class */ (function (_super) { + tslib.__extends(StorageClientContext, _super); /** * Initializes a new instance of the StorageClientContext class. * @param url The URL of the service account, container, or blob that is the targe of the desired * operation. * @param options The parameter options */ - constructor(url, options) { + function StorageClientContext(url, options) { + var _this = this; if (url === undefined) { throw new Error("'url' cannot be null"); } @@ -30183,31 +31336,33 @@ class StorageClientContext extends coreHttp.ServiceClient { options = {}; } if (!options.userAgent) { - const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); - options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + var defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + options.userAgent = packageName + "/" + packageVersion + " " + defaultUserAgent; } - super(undefined, options); - this.requestContentType = "application/json; charset=utf-8"; - this.baseUri = options.endpoint || "{url}"; + _this = _super.call(this, undefined, options) || this; + _this.requestContentType = "application/json; charset=utf-8"; + _this.baseUri = options.endpoint || "{url}"; // Parameter assignments - this.url = url; + _this.url = url; // Assigning values to Constant parameters - this.version = options.version || "2020-08-04"; + _this.version = options.version || "2020-08-04"; + return _this; } -} + return StorageClientContext; +}(coreHttp.ServiceClient)); // Copyright (c) Microsoft Corporation. /** * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} * and etc. */ -class StorageClient { +var StorageClient = /** @class */ (function () { /** * Creates an instance of StorageClient. * @param url - url to resource * @param pipeline - request policy pipeline. 
*/ - constructor(url, pipeline) { + function StorageClient(url, pipeline) { // URL should be encoded and only once, protocol layer shouldn't encode URL again this.url = escapeURLPath(url); this.accountName = getAccountNameFromUrl(url); @@ -30215,7 +31370,8 @@ class StorageClient { this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); this.credential = new AnonymousCredential(); - for (const factory of this.pipeline.factories) { + for (var _i = 0, _a = this.pipeline.factories; _i < _a.length; _i++) { + var factory = _a[_i]; if ((coreHttp.isNode && factory instanceof StorageSharedKeyCredential) || factory instanceof AnonymousCredential) { this.credential = factory; @@ -30227,17 +31383,18 @@ class StorageClient { } } // Override protocol layer's default content-type - const storageClientContext = this.storageClientContext; + var storageClientContext = this.storageClientContext; storageClientContext.requestContentType = undefined; } -} + return StorageClient; +}()); // Copyright (c) Microsoft Corporation. /** * Creates a span using the global tracer. * @internal */ -const createSpan = coreTracing.createSpanFunction({ +var createSpan = coreTracing.createSpanFunction({ packagePrefix: "Azure.Storage.Blob", namespace: "Microsoft.Storage" }); @@ -30267,8 +31424,8 @@ function convertTracingToRequestOptionsBase(options) { * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but * the order of the permissions is particular and this class guarantees correctness. */ -class BlobSASPermissions { - constructor() { +var BlobSASPermissions = /** @class */ (function () { + function BlobSASPermissions() { /** * Specifies Read access granted. */ @@ -30312,9 +31469,10 @@ class BlobSASPermissions { * * @param permissions - */ - static parse(permissions) { - const blobSASPermissions = new BlobSASPermissions(); - for (const char of permissions) { + BlobSASPermissions.parse = function (permissions) { + var blobSASPermissions = new BlobSASPermissions(); + for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) { + var char = permissions_1[_i]; switch (char) { case "r": blobSASPermissions.read = true; @@ -30344,19 +31502,19 @@ class BlobSASPermissions { blobSASPermissions.execute = true; break; default: - throw new RangeError(`Invalid permission: ${char}`); + throw new RangeError("Invalid permission: " + char); } } return blobSASPermissions; - } + }; /** * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it * and boolean values for them. * * @param permissionLike - */ - static from(permissionLike) { - const blobSASPermissions = new BlobSASPermissions(); + BlobSASPermissions.from = function (permissionLike) { + var blobSASPermissions = new BlobSASPermissions(); if (permissionLike.read) { blobSASPermissions.read = true; } @@ -30385,15 +31543,15 @@ class BlobSASPermissions { blobSASPermissions.execute = true; } return blobSASPermissions; - } + }; /** * Converts the given permissions to a string. Using this method will guarantee the permissions are in an * order accepted by the service. 
* * @returns A string which represents the BlobSASPermissions */ - toString() { - const permissions = []; + BlobSASPermissions.prototype.toString = function () { + var permissions = []; if (this.read) { permissions.push("r"); } @@ -30422,8 +31580,9 @@ class BlobSASPermissions { permissions.push("e"); } return permissions.join(""); - } -} + }; + return BlobSASPermissions; +}()); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -30434,8 +31593,8 @@ class BlobSASPermissions { * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but * the order of the permissions is particular and this class guarantees correctness. */ -class ContainerSASPermissions { - constructor() { +var ContainerSASPermissions = /** @class */ (function () { + function ContainerSASPermissions() { /** * Specifies Read access granted. */ @@ -30483,9 +31642,10 @@ class ContainerSASPermissions { * * @param permissions - */ - static parse(permissions) { - const containerSASPermissions = new ContainerSASPermissions(); - for (const char of permissions) { + ContainerSASPermissions.parse = function (permissions) { + var containerSASPermissions = new ContainerSASPermissions(); + for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) { + var char = permissions_1[_i]; switch (char) { case "r": containerSASPermissions.read = true; @@ -30518,19 +31678,19 @@ class ContainerSASPermissions { containerSASPermissions.execute = true; break; default: - throw new RangeError(`Invalid permission ${char}`); + throw new RangeError("Invalid permission " + char); } } return containerSASPermissions; - } + }; /** * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it * and boolean values for them. * * @param permissionLike - */ - static from(permissionLike) { - const containerSASPermissions = new ContainerSASPermissions(); + ContainerSASPermissions.from = function (permissionLike) { + var containerSASPermissions = new ContainerSASPermissions(); if (permissionLike.read) { containerSASPermissions.read = true; } @@ -30562,7 +31722,7 @@ class ContainerSASPermissions { containerSASPermissions.execute = true; } return containerSASPermissions; - } + }; /** * Converts the given permissions to a string. Using this method will guarantee the permissions are in an * order accepted by the service. @@ -30571,8 +31731,8 @@ class ContainerSASPermissions { * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * */ - toString() { - const permissions = []; + ContainerSASPermissions.prototype.toString = function () { + var permissions = []; if (this.read) { permissions.push("r"); } @@ -30604,8 +31764,9 @@ class ContainerSASPermissions { permissions.push("e"); } return permissions.join(""); - } -} + }; + return ContainerSASPermissions; +}()); // Copyright (c) Microsoft Corporation. /** @@ -30614,13 +31775,13 @@ class ContainerSASPermissions { * UserDelegationKeyCredential is only used for generation of user delegation SAS. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas */ -class UserDelegationKeyCredential { +var UserDelegationKeyCredential = /** @class */ (function () { /** * Creates an instance of UserDelegationKeyCredential. 
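Both permission classes in the hunks above exist mainly so that parse() followed by toString() normalizes a caller-supplied permission string: unknown characters raise a RangeError, and the flags are re-emitted in the order the service accepts. A simplified sketch of that normalization idea; the character set and ordering below are an illustrative subset, not the SDK's exact lists:

// Simplified sketch of the parse()/toString() normalization pattern above.
// The real classes track individual boolean flags; the character set and order
// here are an illustrative subset, not the SDK's exact lists.
const KNOWN_ORDER = ["r", "a", "c", "w", "d", "x", "t", "m", "e"];

function normalizePermissions(raw) {
  const flags = new Set();
  for (const ch of raw) {
    if (!KNOWN_ORDER.includes(ch)) {
      throw new RangeError("Invalid permission: " + ch); // parse() rejects unknown characters
    }
    flags.add(ch);
  }
  // toString() re-emits the flags in the canonical order, regardless of input order.
  return KNOWN_ORDER.filter((ch) => flags.has(ch)).join("");
}

console.log(normalizePermissions("wr")); // "rw": reordered into the accepted order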
* @param accountName - * @param userDelegationKey - */ - constructor(accountName, userDelegationKey) { + function UserDelegationKeyCredential(accountName, userDelegationKey) { this.accountName = accountName; this.userDelegationKey = userDelegationKey; this.key = Buffer.from(userDelegationKey.value, "base64"); @@ -30630,13 +31791,14 @@ class UserDelegationKeyCredential { * * @param stringToSign - */ - computeHMACSHA256(stringToSign) { + UserDelegationKeyCredential.prototype.computeHMACSHA256 = function (stringToSign) { // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); return crypto.createHmac("sha256", this.key) .update(stringToSign, "utf8") .digest("base64"); - } -} + }; + return UserDelegationKeyCredential; +}()); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -30648,7 +31810,7 @@ class UserDelegationKeyCredential { * @param ipRange - */ function ipRangeToString(ipRange) { - return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; + return ipRange.end ? ipRange.start + "-" + ipRange.end : ipRange.start; } // Copyright (c) Microsoft Corporation. @@ -30671,8 +31833,8 @@ function ipRangeToString(ipRange) { * * NOTE: Instances of this class are immutable. */ -class SASQueryParameters { - constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId) { +var SASQueryParameters = /** @class */ (function () { + function SASQueryParameters(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId) { this.version = version; this.signature = signature; if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { @@ -30729,26 +31891,30 @@ class SASQueryParameters { } } } - /** - * Optional. IP range allowed for this SAS. - * - * @readonly - */ - get ipRange() { - if (this.ipRangeInner) { - return { - end: this.ipRangeInner.end, - start: this.ipRangeInner.start - }; - } - return undefined; - } + Object.defineProperty(SASQueryParameters.prototype, "ipRange", { + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get: function () { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start + }; + } + return undefined; + }, + enumerable: false, + configurable: true + }); /** * Encodes all SAS query parameters into a string that can be appended to a URL. * */ - toString() { - const params = [ + SASQueryParameters.prototype.toString = function () { + var params = [ "sv", "ss", "srt", @@ -30774,8 +31940,9 @@ class SASQueryParameters { "saoid", "scid" ]; - const queries = []; - for (const param of params) { + var queries = []; + for (var _i = 0, params_1 = params; _i < params_1.length; _i++) { + var param = params_1[_i]; switch (param) { case "sv": this.tryAppendQueryParameter(queries, param, this.version); @@ -30852,7 +32019,7 @@ class SASQueryParameters { } } return queries.join("&"); - } + }; /** * A private helper method used to filter and append query key/value pairs into an array. 
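SASQueryParameters.prototype.toString, shown above, walks a fixed list of parameter keys and appends only those with values through tryAppendQueryParameter, URL-encoding each key and value and joining the result with "&". A small sketch of that append-and-join pattern, with the key list trimmed to a few illustrative entries and dummy values:

// Sketch of the query-string assembly used by SASQueryParameters.toString above.
function tryAppendQueryParameter(queries, key, value) {
  if (!value) {
    return; // parameters without a value are skipped
  }
  key = encodeURIComponent(key);
  value = encodeURIComponent(value);
  if (key.length > 0 && value.length > 0) {
    queries.push(key + "=" + value);
  }
}

// Illustrative subset of the real key list ("sv", "ss", "srt", ..., "scid") with dummy values.
const values = { sv: "2020-08-04", sp: "rw", sig: "abc+def=" };
const queries = [];
for (const key of ["sv", "ss", "srt", "sp", "sig"]) {
  tryAppendQueryParameter(queries, key, values[key]);
}
console.log(queries.join("&")); // sv=2020-08-04&sp=rw&sig=abc%2Bdef%3D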
* @@ -30860,25 +32027,26 @@ class SASQueryParameters { * @param key - * @param value - */ - tryAppendQueryParameter(queries, key, value) { + SASQueryParameters.prototype.tryAppendQueryParameter = function (queries, key, value) { if (!value) { return; } key = encodeURIComponent(key); value = encodeURIComponent(value); if (key.length > 0 && value.length > 0) { - queries.push(`${key}=${value}`); + queries.push(key + "=" + value); } - } -} + }; + return SASQueryParameters; +}()); // Copyright (c) Microsoft Corporation. function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { - const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; - const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential + var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + var sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential ? sharedKeyCredentialOrUserDelegationKey : undefined; - let userDelegationKeyCredential; + var userDelegationKeyCredential; if (sharedKeyCredential === undefined && accountName !== undefined) { userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); } @@ -30934,12 +32102,12 @@ function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKe !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } - let resource = "c"; + var resource = "c"; if (blobSASSignatureValues.blobName) { resource = "b"; } // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; + var verifiedPermissions; if (blobSASSignatureValues.permissions) { if (blobSASSignatureValues.blobName) { verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); @@ -30949,7 +32117,7 @@ function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKe } } // Signature is generated on the un-url-encoded values. - const stringToSign = [ + var stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) @@ -30968,7 +32136,7 @@ function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKe blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "" ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + var signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); } /** @@ -30993,8 +32161,8 @@ function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKe !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; + var resource = "c"; + var timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) { @@ -31006,7 +32174,7 @@ function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKe } } // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; + var verifiedPermissions; if (blobSASSignatureValues.permissions) { if (blobSASSignatureValues.blobName) { verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); @@ -31016,7 +32184,7 @@ function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKe } } // Signature is generated on the un-url-encoded values. - const stringToSign = [ + var stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) @@ -31037,7 +32205,7 @@ function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKe blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", blobSASSignatureValues.contentType ? 
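The generateBlobSASQueryParameters* helpers above all share one shape: validate the inputs, normalize the permissions, build a newline-joined string-to-sign from the SAS fields, HMAC-sign it with the credential (shared key or user delegation key), and wrap the pieces in a SASQueryParameters. A hedged sketch of that flow; the field list and the stand-in credential below are illustrative only and do not reproduce the exact, version-specific field order used by the SDK:

// Condensed sketch of the SAS signing flow above. The field list is an
// illustrative subset, not the exact per-version order used by the SDK.
const crypto = require("crypto");

function sketchGenerateBlobSAS(values, credential) {
  // 1. Permissions are normalized first (the real code round-trips parse()/toString()).
  const permissions = values.permissions || "";
  // 2. Build the newline-joined string-to-sign over the SAS fields.
  const stringToSign = [
    permissions,
    values.startsOn || "",
    values.expiresOn || "",
    "/blob/" + values.accountName + "/" + values.containerName, // canonical resource name
    values.version
  ].join("\n");
  // 3. Sign it with the credential's HMAC-SHA256 helper.
  const signature = credential.computeHMACSHA256(stringToSign);
  // 4. The real helpers return a SASQueryParameters carrying the signature and inputs.
  return { signature, stringToSign };
}

// Usage with dummy values and a stand-in credential:
const dummyCredential = {
  computeHMACSHA256: (s) =>
    crypto.createHmac("sha256", Buffer.from("ZHVtbXk=", "base64")).update(s, "utf8").digest("base64")
};
console.log(sketchGenerateBlobSAS({
  permissions: "r",
  expiresOn: "2024-01-01T00:00:00Z",
  accountName: "myaccount",
  containerName: "mycontainer",
  version: "2020-08-04"
}, dummyCredential).signature);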
blobSASSignatureValues.contentType : "" ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + var signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); } /** @@ -31060,8 +32228,8 @@ function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userD if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; + var resource = "c"; + var timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) { @@ -31073,7 +32241,7 @@ function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userD } } // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; + var verifiedPermissions; if (blobSASSignatureValues.permissions) { if (blobSASSignatureValues.blobName) { verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); @@ -31083,7 +32251,7 @@ function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userD } } // Signature is generated on the un-url-encoded values. - const stringToSign = [ + var stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? 
truncatedISO8061Date(blobSASSignatureValues.startsOn, false) @@ -31113,7 +32281,7 @@ function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userD blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + var signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); } /** @@ -31136,8 +32304,8 @@ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userD if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; + var resource = "c"; + var timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) { @@ -31149,7 +32317,7 @@ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userD } } // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; + var verifiedPermissions; if (blobSASSignatureValues.permissions) { if (blobSASSignatureValues.blobName) { verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); @@ -31159,7 +32327,7 @@ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userD } } // Signature is generated on the un-url-encoded values. - const stringToSign = [ + var stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? 
truncatedISO8061Date(blobSASSignatureValues.startsOn, false) @@ -31192,20 +32360,20 @@ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userD blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + var signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); } function getCanonicalName(accountName, containerName, blobName) { // Container: "/blob/account/containerName" // Blob: "/blob/account/containerName/blobName" - const elements = [`/blob/${accountName}/${containerName}`]; + var elements = ["/blob/" + accountName + "/" + containerName]; if (blobName) { - elements.push(`/${blobName}`); + elements.push("/" + blobName); } return elements.join(""); } function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { - const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); } @@ -31241,18 +32409,17 @@ function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { return blobSASSignatureValues; } -// Copyright (c) Microsoft Corporation. /** * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. */ -class BlobLeaseClient { +var BlobLeaseClient = /** @class */ (function () { /** * Creates an instance of BlobLeaseClient. * @param client - The client to make the lease operation requests. * @param leaseId - Initial proposed lease id. */ - constructor(client, leaseId) { - const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); + function BlobLeaseClient(client, leaseId) { + var clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); this._url = client.url; if (client.name === undefined) { this._isContainer = true; @@ -31267,22 +32434,30 @@ class BlobLeaseClient { } this._leaseId = leaseId; } - /** - * Gets the lease Id. - * - * @readonly - */ - get leaseId() { - return this._leaseId; - } - /** - * Gets the url. - * - * @readonly - */ - get url() { - return this._url; - } + Object.defineProperty(BlobLeaseClient.prototype, "leaseId", { + /** + * Gets the lease Id. + * + * @readonly + */ + get: function () { + return this._leaseId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobLeaseClient.prototype, "url", { + /** + * Gets the url. + * + * @readonly + */ + get: function () { + return this._url; + }, + enumerable: false, + configurable: true + }); /** * Establishes and manages a lock on a container for delete operations, or on a blob * for write and delete operations. 
@@ -31295,29 +32470,41 @@ class BlobLeaseClient { * @param options - option to configure lease management operations. * @returns Response data for acquire lease operation. */ - async acquireLease(duration, options = {}) { + BlobLeaseClient.prototype.acquireLease = function (duration, options) { var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _g, span, updatedOptions, e_1; + return tslib.__generator(this, function (_h) { + switch (_h.label) { + case 0: + _g = createSpan("BlobLeaseClient-acquireLease", options), span = _g.span, updatedOptions = _g.updatedOptions; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + _h.label = 1; + case 1: + _h.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this._containerOrBlobOperation.acquireLease(tslib.__assign({ abortSignal: options.abortSignal, duration: duration, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _h.sent()]; + case 3: + e_1 = _h.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_1.message + }); + throw e_1; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * To change the ID of the lease. 
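Every lease method in this hunk is rewritten from native async/await into a tslib.__awaiter/__generator state machine so the bundled output stays ES5-compatible; the behavior (open a tracing span, validate container-level conditions, call the generated operation, record errors on the span, always end the span) is unchanged. A small illustration of how that down-leveling works, assuming only the tslib package the bundle already imports; the function here is a toy, not SDK code:

// Toy example of the async -> tslib.__awaiter/__generator rewrite visible above.
const tslib = require("tslib");

// Modern source (what the removed lines correspond to):
//   async function demo(x) { const r = await Promise.resolve(x + 1); return r; }
//
// ES5 output in the same style as the added lines:
function demo(x) {
  return tslib.__awaiter(this, void 0, void 0, function () {
    var r;
    return tslib.__generator(this, function (_a) {
      switch (_a.label) {
        case 0: return [4 /*yield*/, Promise.resolve(x + 1)]; // the "await"
        case 1:
          r = _a.sent();                                      // resolved value of the await
          return [2 /*return*/, r];                           // the "return"
      }
    });
  });
}

demo(41).then(function (v) { console.log(v); }); // 42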
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container @@ -31328,31 +32515,44 @@ class BlobLeaseClient { * @param options - option to configure lease management operations. * @returns Response data for change lease operation. */ - async changeLease(proposedLeaseId, options = {}) { + BlobLeaseClient.prototype.changeLease = function (proposedLeaseId, options) { var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - this._leaseId = proposedLeaseId; - return response; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _g, span, updatedOptions, response, e_2; + return tslib.__generator(this, function (_h) { + switch (_h.label) { + case 0: + _g = createSpan("BlobLeaseClient-changeLease", options), span = _g.span, updatedOptions = _g.updatedOptions; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + _h.label = 1; + case 1: + _h.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, tslib.__assign({ abortSignal: options.abortSignal, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _h.sent(); + this._leaseId = proposedLeaseId; + return [2 /*return*/, response]; + case 3: + e_2 = _h.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_2.message + }); + throw e_2; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * To free the lease if it is no longer needed so that another client may * immediately acquire a lease against the container or the blob. @@ -31363,29 +32563,41 @@ class BlobLeaseClient { * @param options - option to configure lease management operations. * @returns Response data for release lease operation. */ - async releaseLease(options = {}) { + BlobLeaseClient.prototype.releaseLease = function (options) { var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _g, span, updatedOptions, e_3; + return tslib.__generator(this, function (_h) { + switch (_h.label) { + case 0: + _g = createSpan("BlobLeaseClient-releaseLease", options), span = _g.span, updatedOptions = _g.updatedOptions; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + _h.label = 1; + case 1: + _h.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this._containerOrBlobOperation.releaseLease(this._leaseId, tslib.__assign({ abortSignal: options.abortSignal, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _h.sent()]; + case 3: + e_3 = _h.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_3.message + }); + throw e_3; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * To renew the lease. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container @@ -31395,29 +32607,41 @@ class BlobLeaseClient { * @param options - Optional option to configure lease management operations. * @returns Response data for renew lease operation. */ - async renewLease(options = {}) { + BlobLeaseClient.prototype.renewLease = function (options) { var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _g, span, updatedOptions, e_4; + return tslib.__generator(this, function (_h) { + switch (_h.label) { + case 0: + _g = createSpan("BlobLeaseClient-renewLease", options), span = _g.span, updatedOptions = _g.updatedOptions; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + _h.label = 1; + case 1: + _h.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this._containerOrBlobOperation.renewLease(this._leaseId, tslib.__assign({ abortSignal: options.abortSignal, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _h.sent()]; + case 3: + e_4 = _h.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_4.message + }); + throw e_4; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * To end the lease but ensure that another client cannot acquire a new lease * until the current lease period has expired. @@ -31429,31 +32653,44 @@ class BlobLeaseClient { * @param options - Optional options to configure lease management operations. * @returns Response data for break lease operation. */ - async breakLease(breakPeriod, options = {}) { + BlobLeaseClient.prototype.breakLease = function (breakPeriod, options) { var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); - return await this._containerOrBlobOperation.breakLease(operationOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _g, span, updatedOptions, operationOptions, e_5; + return tslib.__generator(this, function (_h) { + switch (_h.label) { + case 0: + _g = createSpan("BlobLeaseClient-breakLease", options), span = _g.span, updatedOptions = _g.updatedOptions; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + _h.label = 1; + case 1: + _h.trys.push([1, 3, 4, 5]); + operationOptions = tslib.__assign({ abortSignal: options.abortSignal, breakPeriod: breakPeriod, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); + return [4 /*yield*/, this._containerOrBlobOperation.breakLease(operationOptions)]; + case 2: return [2 /*return*/, _h.sent()]; + case 3: + e_5 = _h.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_5.message + }); + throw e_5; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } -} + }); + }; + return BlobLeaseClient; +}()); // Copyright (c) Microsoft Corporation. /** @@ -31461,7 +32698,8 @@ class BlobLeaseClient { * * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. */ -class RetriableReadableStream extends stream.Readable { +var RetriableReadableStream = /** @class */ (function (_super) { + tslib.__extends(RetriableReadableStream, _super); /** * Creates an instance of RetriableReadableStream. * @@ -31472,31 +32710,32 @@ class RetriableReadableStream extends stream.Readable { * @param count - How much data in original data source to read * @param options - */ - constructor(source, getter, offset, count, options = {}) { - super({ highWaterMark: options.highWaterMark }); - this.retries = 0; - this.sourceDataHandler = (data) => { - if (this.options.doInjectErrorOnce) { - this.options.doInjectErrorOnce = undefined; - this.source.pause(); - this.source.removeAllListeners("data"); - this.source.emit("end"); + function RetriableReadableStream(source, getter, offset, count, options) { + if (options === void 0) { options = {}; } + var _this = _super.call(this, { highWaterMark: options.highWaterMark }) || this; + _this.retries = 0; + _this.sourceDataHandler = function (data) { + if (_this.options.doInjectErrorOnce) { + _this.options.doInjectErrorOnce = undefined; + _this.source.pause(); + _this.source.removeAllListeners("data"); + _this.source.emit("end"); return; } // console.log( // `Offset: ${this.offset}, Received ${data.length} from internal stream` // ); - this.offset += data.length; - if (this.onProgress) { - this.onProgress({ loadedBytes: this.offset - this.start }); + _this.offset += data.length; + if (_this.onProgress) { + _this.onProgress({ loadedBytes: _this.offset - _this.start }); } - if (!this.push(data)) { - this.source.pause(); + if (!_this.push(data)) { + _this.source.pause(); } }; - this.sourceErrorOrEndHandler = (err) => { + _this.sourceErrorOrEndHandler = function (err) { if (err && err.name === "AbortError") { - this.destroy(err); + _this.destroy(err); return; } // console.log( @@ -31504,67 +32743,69 @@ class RetriableReadableStream extends stream.Readable { // this.offset // }, dest end : ${this.end}` // ); - this.removeSourceEventHandlers(); - if (this.offset - 1 === this.end) { - this.push(null); + _this.removeSourceEventHandlers(); + if (_this.offset - 1 === _this.end) { + _this.push(null); } - else if (this.offset <= this.end) { + else if (_this.offset <= _this.end) { // console.log( // `retries: ${this.retries}, max retries: ${this.maxRetries}` // ); - if (this.retries < this.maxRetryRequests) { - this.retries += 1; - this.getter(this.offset) - .then((newSource) => { - this.source = newSource; - this.setSourceEventHandlers(); + if (_this.retries < _this.maxRetryRequests) { + _this.retries += 1; + _this.getter(_this.offset) + .then(function (newSource) { + _this.source = newSource; + _this.setSourceEventHandlers(); }) - .catch((error) => { - this.destroy(error); + .catch(function (error) { + _this.destroy(error); }); } else 
{ - this.destroy(new Error( + _this.destroy(new Error( // tslint:disable-next-line:max-line-length - `Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this - .offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + "Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: " + (_this + .offset - 1) + ", data needed offset: " + _this.end + ", retries: " + _this.retries + ", max retries: " + _this.maxRetryRequests)); } } else { - this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + _this.destroy(new Error("Data corruption failure: Received more data than original request, data needed offset is " + _this.end + ", received offset: " + (_this.offset - 1))); } }; - this.getter = getter; - this.source = source; - this.start = offset; - this.offset = offset; - this.end = offset + count - 1; - this.maxRetryRequests = + _this.getter = getter; + _this.source = source; + _this.start = offset; + _this.offset = offset; + _this.end = offset + count - 1; + _this.maxRetryRequests = options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; - this.onProgress = options.onProgress; - this.options = options; - this.setSourceEventHandlers(); + _this.onProgress = options.onProgress; + _this.options = options; + _this.setSourceEventHandlers(); + return _this; } - _read() { + RetriableReadableStream.prototype._read = function () { this.source.resume(); - } - setSourceEventHandlers() { + }; + RetriableReadableStream.prototype.setSourceEventHandlers = function () { this.source.on("data", this.sourceDataHandler); this.source.on("end", this.sourceErrorOrEndHandler); this.source.on("error", this.sourceErrorOrEndHandler); - } - removeSourceEventHandlers() { + }; + RetriableReadableStream.prototype.removeSourceEventHandlers = function () { this.source.removeListener("data", this.sourceDataHandler); this.source.removeListener("end", this.sourceErrorOrEndHandler); this.source.removeListener("error", this.sourceErrorOrEndHandler); - } - _destroy(error, callback) { + }; + RetriableReadableStream.prototype._destroy = function (error, callback) { // remove listener from source and release source this.removeSourceEventHandlers(); this.source.destroy(); callback(error === null ? undefined : error); - } -} + }; + return RetriableReadableStream; +}(stream.Readable)); // Copyright (c) Microsoft Corporation. /** @@ -31577,7 +32818,7 @@ class RetriableReadableStream extends stream.Readable { * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js * Readable stream. */ -class BlobDownloadResponse { +var BlobDownloadResponse = /** @class */ (function () { /** * Creates an instance of BlobDownloadResponse. * @@ -31587,419 +32828,593 @@ class BlobDownloadResponse { * @param count - * @param options - */ - constructor(originalResponse, getter, offset, count, options = {}) { + function BlobDownloadResponse(originalResponse, getter, offset, count, options) { + if (options === void 0) { options = {}; } this.originalResponse = originalResponse; this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); } - /** - * Indicates that the service supports - * requests for partial file content. 
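RetriableReadableStream, completed above, tracks the byte offset it has already pushed downstream and, when the source ends before the requested range is complete, asks the getter for a fresh stream starting at the current offset, up to maxRetryRequests times, while honoring backpressure through push()/pause() and resuming the source from _read(). A condensed, generic sketch of that resume-from-offset idea; it omits the class's AbortError handling and progress callback:

// Generic sketch of the retry-by-reopening idea behind RetriableReadableStream.
// getter(offset) must return (a promise of) a fresh Readable starting at `offset`.
const { Readable } = require("stream");

function sketchRetriableStream(getter, offset, count, maxRetryRequests = 0) {
  const end = offset + count - 1;
  let retries = 0;
  let current;
  const out = new Readable({
    read() {
      if (current) current.resume(); // mirrors _read() resuming the paused source
    }
  });

  function attach(source) {
    current = source;
    source.on("data", (chunk) => {
      offset += chunk.length;
      if (!out.push(chunk)) source.pause(); // honor downstream backpressure
    });
    source.on("end", () => {
      if (offset - 1 === end) {
        out.push(null); // the requested range is complete
      } else if (offset - 1 > end) {
        out.destroy(new Error("received more data than requested"));
      } else if (retries < maxRetryRequests) {
        retries += 1; // reopen the source from the current offset and keep going
        Promise.resolve(getter(offset)).then(attach, (err) => out.destroy(err));
      } else {
        out.destroy(new Error("received less data than required; retries exhausted"));
      }
    });
    source.on("error", (err) => out.destroy(err));
  }

  Promise.resolve(getter(offset)).then(attach, (err) => out.destroy(err));
  return out;
}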
- * - * @readonly - */ - get acceptRanges() { - return this.originalResponse.acceptRanges; - } - /** - * Returns if it was previously specified - * for the file. - * - * @readonly - */ - get cacheControl() { - return this.originalResponse.cacheControl; - } - /** - * Returns the value that was specified - * for the 'x-ms-content-disposition' header and specifies how to process the - * response. - * - * @readonly - */ - get contentDisposition() { - return this.originalResponse.contentDisposition; - } - /** - * Returns the value that was specified - * for the Content-Encoding request header. - * - * @readonly - */ - get contentEncoding() { - return this.originalResponse.contentEncoding; - } - /** - * Returns the value that was specified - * for the Content-Language request header. - * - * @readonly - */ - get contentLanguage() { - return this.originalResponse.contentLanguage; - } - /** - * The current sequence number for a - * page blob. This header is not returned for block blobs or append blobs. - * - * @readonly - */ - get blobSequenceNumber() { - return this.originalResponse.blobSequenceNumber; - } - /** - * The blob's type. Possible values include: - * 'BlockBlob', 'PageBlob', 'AppendBlob'. - * - * @readonly - */ - get blobType() { - return this.originalResponse.blobType; - } - /** - * The number of bytes present in the - * response body. - * - * @readonly - */ - get contentLength() { - return this.originalResponse.contentLength; - } - /** - * If the file has an MD5 hash and the - * request is to read the full file, this response header is returned so that - * the client can check for message content integrity. If the request is to - * read a specified range and the 'x-ms-range-get-content-md5' is set to - * true, then the request returns an MD5 hash for the range, as long as the - * range size is less than or equal to 4 MB. If neither of these sets of - * conditions is true, then no value is returned for the 'Content-MD5' - * header. - * - * @readonly - */ - get contentMD5() { - return this.originalResponse.contentMD5; - } - /** - * Indicates the range of bytes returned if - * the client requested a subset of the file by setting the Range request - * header. - * - * @readonly - */ - get contentRange() { - return this.originalResponse.contentRange; - } - /** - * The content type specified for the file. - * The default content type is 'application/octet-stream' - * - * @readonly - */ - get contentType() { - return this.originalResponse.contentType; - } - /** - * Conclusion time of the last attempted - * Copy File operation where this file was the destination file. This value - * can specify the time of a completed, aborted, or failed copy attempt. - * - * @readonly - */ - get copyCompletedOn() { - return this.originalResponse.copyCompletedOn; - } - /** - * String identifier for the last attempted Copy - * File operation where this file was the destination file. - * - * @readonly - */ - get copyId() { - return this.originalResponse.copyId; - } - /** - * Contains the number of bytes copied and - * the total bytes in the source in the last attempted Copy File operation - * where this file was the destination file. Can show between 0 and - * Content-Length bytes copied. - * - * @readonly - */ - get copyProgress() { - return this.originalResponse.copyProgress; - } - /** - * URL up to 2KB in length that specifies the - * source file used in the last attempted Copy File operation where this file - * was the destination file. 
- * - * @readonly - */ - get copySource() { - return this.originalResponse.copySource; - } - /** - * State of the copy operation - * identified by 'x-ms-copy-id'. Possible values include: 'pending', - * 'success', 'aborted', 'failed' - * - * @readonly - */ - get copyStatus() { - return this.originalResponse.copyStatus; - } - /** - * Only appears when - * x-ms-copy-status is failed or pending. Describes cause of fatal or - * non-fatal copy operation failure. - * - * @readonly - */ - get copyStatusDescription() { - return this.originalResponse.copyStatusDescription; - } - /** - * When a blob is leased, - * specifies whether the lease is of infinite or fixed duration. Possible - * values include: 'infinite', 'fixed'. - * - * @readonly - */ - get leaseDuration() { - return this.originalResponse.leaseDuration; - } - /** - * Lease state of the blob. Possible - * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. - * - * @readonly - */ - get leaseState() { - return this.originalResponse.leaseState; - } - /** - * The current lease status of the - * blob. Possible values include: 'locked', 'unlocked'. - * - * @readonly - */ - get leaseStatus() { - return this.originalResponse.leaseStatus; - } - /** - * A UTC date/time value generated by the service that - * indicates the time at which the response was initiated. - * - * @readonly - */ - get date() { - return this.originalResponse.date; - } - /** - * The number of committed blocks - * present in the blob. This header is returned only for append blobs. - * - * @readonly - */ - get blobCommittedBlockCount() { - return this.originalResponse.blobCommittedBlockCount; - } - /** - * The ETag contains a value that you can use to - * perform operations conditionally, in quotes. - * - * @readonly - */ - get etag() { - return this.originalResponse.etag; - } - /** - * The number of tags associated with the blob - * - * @readonly - */ - get tagCount() { - return this.originalResponse.tagCount; - } - /** - * The error code. - * - * @readonly - */ - get errorCode() { - return this.originalResponse.errorCode; - } - /** - * The value of this header is set to - * true if the file data and application metadata are completely encrypted - * using the specified algorithm. Otherwise, the value is set to false (when - * the file is unencrypted, or if only parts of the file/application metadata - * are encrypted). - * - * @readonly - */ - get isServerEncrypted() { - return this.originalResponse.isServerEncrypted; - } - /** - * If the blob has a MD5 hash, and if - * request contains range header (Range or x-ms-range), this response header - * is returned with the value of the whole blob's MD5 value. This value may - * or may not be equal to the value returned in Content-MD5 header, with the - * latter calculated from the requested range. - * - * @readonly - */ - get blobContentMD5() { - return this.originalResponse.blobContentMD5; - } - /** - * Returns the date and time the file was last - * modified. Any operation that modifies the file or its properties updates - * the last modified time. - * - * @readonly - */ - get lastModified() { - return this.originalResponse.lastModified; - } - /** - * Returns the UTC date and time generated by the service that indicates the time at which the blob was - * last read or written to. - * - * @readonly - */ - get lastAccessed() { - return this.originalResponse.lastAccessed; - } - /** - * A name-value pair - * to associate with a file storage object. 
- * - * @readonly - */ - get metadata() { - return this.originalResponse.metadata; - } - /** - * This header uniquely identifies the request - * that was made and can be used for troubleshooting the request. - * - * @readonly - */ - get requestId() { - return this.originalResponse.requestId; - } - /** - * If a client request id header is sent in the request, this header will be present in the - * response with the same value. - * - * @readonly - */ - get clientRequestId() { - return this.originalResponse.clientRequestId; - } - /** - * Indicates the version of the Blob service used - * to execute the request. - * - * @readonly - */ - get version() { - return this.originalResponse.version; - } - /** - * Indicates the versionId of the downloaded blob version. - * - * @readonly - */ - get versionId() { - return this.originalResponse.versionId; - } - /** - * Indicates whether version of this blob is a current version. - * - * @readonly - */ - get isCurrentVersion() { - return this.originalResponse.isCurrentVersion; - } - /** - * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned - * when the blob was encrypted with a customer-provided key. - * - * @readonly - */ - get encryptionKeySha256() { - return this.originalResponse.encryptionKeySha256; - } - /** - * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to - * true, then the request returns a crc64 for the range, as long as the range size is less than - * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is - * specified in the same request, it will fail with 400(Bad Request) - */ - get contentCrc64() { - return this.originalResponse.contentCrc64; - } - /** - * Object Replication Policy Id of the destination blob. - * - * @readonly - */ - get objectReplicationDestinationPolicyId() { - return this.originalResponse.objectReplicationDestinationPolicyId; - } - /** - * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. - * - * @readonly - */ - get objectReplicationSourceProperties() { - return this.originalResponse.objectReplicationSourceProperties; - } - /** - * If this blob has been sealed. - * - * @readonly - */ - get isSealed() { - return this.originalResponse.isSealed; - } - /** - * The response body as a browser Blob. - * Always undefined in node.js. - * - * @readonly - */ - get contentAsBlob() { - return this.originalResponse.blobBody; - } - /** - * The response body as a node.js Readable stream. - * Always undefined in the browser. - * - * It will automatically retry when internal read stream unexpected ends. - * - * @readonly - */ - get readableStreamBody() { - return coreHttp.isNode ? this.blobDownloadStream : undefined; - } - /** - * The HTTP response. - */ - get _response() { - return this.originalResponse._response; - } -} + Object.defineProperty(BlobDownloadResponse.prototype, "acceptRanges", { + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get: function () { + return this.originalResponse.acceptRanges; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "cacheControl", { + /** + * Returns if it was previously specified + * for the file. 
+ * + * @readonly + */ + get: function () { + return this.originalResponse.cacheControl; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentDisposition", { + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentDisposition; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentEncoding", { + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentEncoding; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentLanguage", { + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentLanguage; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "blobSequenceNumber", { + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobSequenceNumber; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "blobType", { + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobType; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentLength", { + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentLength; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentMD5", { + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentMD5; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentRange", { + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentRange; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentType", { + /** + * The content type specified for the file. 
+ * The default content type is 'application/octet-stream' + * + * @readonly + */ + get: function () { + return this.originalResponse.contentType; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "copyCompletedOn", { + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get: function () { + return this.originalResponse.copyCompletedOn; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "copyId", { + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get: function () { + return this.originalResponse.copyId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "copyProgress", { + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get: function () { + return this.originalResponse.copyProgress; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "copySource", { + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get: function () { + return this.originalResponse.copySource; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "copyStatus", { + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get: function () { + return this.originalResponse.copyStatus; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "copyStatusDescription", { + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get: function () { + return this.originalResponse.copyStatusDescription; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "leaseDuration", { + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get: function () { + return this.originalResponse.leaseDuration; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "leaseState", { + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get: function () { + return this.originalResponse.leaseState; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "leaseStatus", { + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. 
+ * + * @readonly + */ + get: function () { + return this.originalResponse.leaseStatus; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "date", { + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get: function () { + return this.originalResponse.date; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "blobCommittedBlockCount", { + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobCommittedBlockCount; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "etag", { + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get: function () { + return this.originalResponse.etag; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "tagCount", { + /** + * The number of tags associated with the blob + * + * @readonly + */ + get: function () { + return this.originalResponse.tagCount; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "errorCode", { + /** + * The error code. + * + * @readonly + */ + get: function () { + return this.originalResponse.errorCode; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "isServerEncrypted", { + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get: function () { + return this.originalResponse.isServerEncrypted; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "blobContentMD5", { + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobContentMD5; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "lastModified", { + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get: function () { + return this.originalResponse.lastModified; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "lastAccessed", { + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. 
+ * + * @readonly + */ + get: function () { + return this.originalResponse.lastAccessed; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "metadata", { + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get: function () { + return this.originalResponse.metadata; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "requestId", { + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get: function () { + return this.originalResponse.requestId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "clientRequestId", { + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get: function () { + return this.originalResponse.clientRequestId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "version", { + /** + * Indicates the version of the Blob service used + * to execute the request. + * + * @readonly + */ + get: function () { + return this.originalResponse.version; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "versionId", { + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get: function () { + return this.originalResponse.versionId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "isCurrentVersion", { + /** + * Indicates whether version of this blob is a current version. + * + * @readonly + */ + get: function () { + return this.originalResponse.isCurrentVersion; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "encryptionKeySha256", { + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get: function () { + return this.originalResponse.encryptionKeySha256; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentCrc64", { + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get: function () { + return this.originalResponse.contentCrc64; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "objectReplicationDestinationPolicyId", { + /** + * Object Replication Policy Id of the destination blob. + * + * @readonly + */ + get: function () { + return this.originalResponse.objectReplicationDestinationPolicyId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "objectReplicationSourceProperties", { + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. 
+ * + * @readonly + */ + get: function () { + return this.originalResponse.objectReplicationSourceProperties; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "isSealed", { + /** + * If this blob has been sealed. + * + * @readonly + */ + get: function () { + return this.originalResponse.isSealed; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentAsBlob", { + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobBody; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "readableStreamBody", { + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get: function () { + return coreHttp.isNode ? this.blobDownloadStream : undefined; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "_response", { + /** + * The HTTP response. + */ + get: function () { + return this.originalResponse._response; + }, + enumerable: false, + configurable: true + }); + return BlobDownloadResponse; +}()); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -const AVRO_SYNC_MARKER_SIZE = 16; -const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); -const AVRO_CODEC_KEY = "avro.codec"; -const AVRO_SCHEMA_KEY = "avro.schema"; +var AVRO_SYNC_MARKER_SIZE = 16; +var AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); +var AVRO_CODEC_KEY = "avro.codec"; +var AVRO_SCHEMA_KEY = "avro.schema"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -32010,7 +33425,7 @@ function arraysEqual(a, b) { return false; if (a.length != b.length) return false; - for (let i = 0; i < a.length; ++i) { + for (var i = 0; i < a.length; ++i) { if (a[i] !== b[i]) return false; } @@ -32018,8 +33433,9 @@ function arraysEqual(a, b) { } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -class AvroParser { +var AvroParser = /** @class */ (function () { + function AvroParser() { + } /** * Reads a fixed number of bytes from the stream. * @@ -32027,135 +33443,294 @@ class AvroParser { * @param length - * @param options - */ - static async readFixedBytes(stream, length, options = {}) { - const bytes = await stream.read(length, { abortSignal: options.abortSignal }); - if (bytes.length != length) { - throw new Error("Hit stream end."); - } - return bytes; - } + AvroParser.readFixedBytes = function (stream, length, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var bytes; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, stream.read(length, { abortSignal: options.abortSignal })]; + case 1: + bytes = _a.sent(); + if (bytes.length != length) { + throw new Error("Hit stream end."); + } + return [2 /*return*/, bytes]; + } + }); + }); + }; /** * Reads a single byte from the stream. 
* * @param stream - * @param options - */ - static async readByte(stream, options = {}) { - const buf = await AvroParser.readFixedBytes(stream, 1, options); - return buf[0]; - } + AvroParser.readByte = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var buf; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 1, options)]; + case 1: + buf = _a.sent(); + return [2 /*return*/, buf[0]]; + } + }); + }); + }; // int and long are stored in variable-length zig-zag coding. // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types - static async readZigZagLong(stream, options = {}) { - let zigZagEncoded = 0; - let significanceInBit = 0; - let byte, haveMoreByte, significanceInFloat; - do { - byte = await AvroParser.readByte(stream, options); - haveMoreByte = byte & 0x80; - zigZagEncoded |= (byte & 0x7f) << significanceInBit; - significanceInBit += 7; - } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers - if (haveMoreByte) { - // Switch to float arithmetic - zigZagEncoded = zigZagEncoded; - significanceInFloat = 268435456; // 2 ** 28. - do { - byte = await AvroParser.readByte(stream, options); - zigZagEncoded += (byte & 0x7f) * significanceInFloat; - significanceInFloat *= 128; // 2 ** 7 - } while (byte & 0x80); - const res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2; - if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { - throw new Error("Integer overflow."); - } - return res; - } - return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); - } - static async readLong(stream, options = {}) { - return AvroParser.readZigZagLong(stream, options); - } - static async readInt(stream, options = {}) { - return AvroParser.readZigZagLong(stream, options); - } - static async readNull() { - return null; - } - static async readBoolean(stream, options = {}) { - const b = await AvroParser.readByte(stream, options); - if (b == 1) { - return true; - } - else if (b == 0) { - return false; - } - else { - throw new Error("Byte was not a boolean."); - } - } - static async readFloat(stream, options = {}) { - const u8arr = await AvroParser.readFixedBytes(stream, 4, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat32(0, true); // littleEndian = true - } - static async readDouble(stream, options = {}) { - const u8arr = await AvroParser.readFixedBytes(stream, 8, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat64(0, true); // littleEndian = true - } - static async readBytes(stream, options = {}) { - const size = await AvroParser.readLong(stream, options); - if (size < 0) { - throw new Error("Bytes size was negative."); - } - return await stream.read(size, { abortSignal: options.abortSignal }); - } - static async readString(stream, options = {}) { - const u8arr = await AvroParser.readBytes(stream, options); - // polyfill TextDecoder to be backward compatible with older - // nodejs that doesn't expose TextDecoder as a global variable - if (typeof TextDecoder === "undefined" && "function" !== "undefined") { - global.TextDecoder = __webpack_require__(669).TextDecoder; - } - // FUTURE: need TextDecoder polyfill for IE - const utf8decoder = new 
TextDecoder(); - return utf8decoder.decode(u8arr); - } - static async readMapPair(stream, readItemMethod, options = {}) { - const key = await AvroParser.readString(stream, options); - // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. - const value = await readItemMethod(stream, options); - return { key, value }; - } - static async readMap(stream, readItemMethod, options = {}) { - const readPairMethod = async (stream, options = {}) => { - return await AvroParser.readMapPair(stream, readItemMethod, options); - }; - const pairs = await AvroParser.readArray(stream, readPairMethod, options); - const dict = {}; - for (const pair of pairs) { - dict[pair.key] = pair.value; - } - return dict; - } - static async readArray(stream, readItemMethod, options = {}) { - const items = []; - for (let count = await AvroParser.readLong(stream, options); count != 0; count = await AvroParser.readLong(stream, options)) { - if (count < 0) { - // Ignore block sizes - await AvroParser.readLong(stream, options); - count = -count; - } - while (count--) { - const item = await readItemMethod(stream, options); - items.push(item); - } - } - return items; - } -} + AvroParser.readZigZagLong = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var zigZagEncoded, significanceInBit, byte, haveMoreByte, significanceInFloat, res; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + zigZagEncoded = 0; + significanceInBit = 0; + _a.label = 1; + case 1: return [4 /*yield*/, AvroParser.readByte(stream, options)]; + case 2: + byte = _a.sent(); + haveMoreByte = byte & 0x80; + zigZagEncoded |= (byte & 0x7f) << significanceInBit; + significanceInBit += 7; + _a.label = 3; + case 3: + if (haveMoreByte && significanceInBit < 28) return [3 /*break*/, 1]; + _a.label = 4; + case 4: + if (!haveMoreByte) return [3 /*break*/, 9]; + // Switch to float arithmetic + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; // 2 ** 28. + _a.label = 5; + case 5: return [4 /*yield*/, AvroParser.readByte(stream, options)]; + case 6: + byte = _a.sent(); + zigZagEncoded += (byte & 0x7f) * significanceInFloat; + significanceInFloat *= 128; // 2 ** 7 + _a.label = 7; + case 7: + if (byte & 0x80) return [3 /*break*/, 5]; + _a.label = 8; + case 8: + res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return [2 /*return*/, res]; + case 9: return [2 /*return*/, (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1)]; + } + }); + }); + }; + AvroParser.readLong = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, AvroParser.readZigZagLong(stream, options)]; + }); + }); + }; + AvroParser.readInt = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, AvroParser.readZigZagLong(stream, options)]; + }); + }); + }; + AvroParser.readNull = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, null]; + }); + }); + }; + AvroParser.readBoolean = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var b; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readByte(stream, options)]; + case 1: + b = _a.sent(); + if (b == 1) { + return [2 /*return*/, true]; + } + else if (b == 0) { + return [2 /*return*/, false]; + } + else { + throw new Error("Byte was not a boolean."); + } + } + }); + }); + }; + AvroParser.readFloat = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var u8arr, view; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 4, options)]; + case 1: + u8arr = _a.sent(); + view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return [2 /*return*/, view.getFloat32(0, true)]; // littleEndian = true + } + }); + }); + }; + AvroParser.readDouble = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var u8arr, view; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 8, options)]; + case 1: + u8arr = _a.sent(); + view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return [2 /*return*/, view.getFloat64(0, true)]; // littleEndian = true + } + }); + }); + }; + AvroParser.readBytes = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var size; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readLong(stream, options)]; + case 1: + size = _a.sent(); + if (size < 0) { + throw new Error("Bytes size was negative."); + } + return [4 /*yield*/, stream.read(size, { abortSignal: options.abortSignal })]; + case 2: return [2 /*return*/, _a.sent()]; + } + }); + }); + }; + AvroParser.readString = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var u8arr, utf8decoder; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readBytes(stream, options)]; + case 1: + u8arr = 
_a.sent(); + // polyfill TextDecoder to be backward compatible with older + // nodejs that doesn't expose TextDecoder as a global variable + if (typeof TextDecoder === "undefined" && "function" !== "undefined") { + global.TextDecoder = __webpack_require__(669).TextDecoder; + } + utf8decoder = new TextDecoder(); + return [2 /*return*/, utf8decoder.decode(u8arr)]; + } + }); + }); + }; + AvroParser.readMapPair = function (stream, readItemMethod, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var key, value; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readString(stream, options)]; + case 1: + key = _a.sent(); + return [4 /*yield*/, readItemMethod(stream, options)]; + case 2: + value = _a.sent(); + return [2 /*return*/, { key: key, value: value }]; + } + }); + }); + }; + AvroParser.readMap = function (stream, readItemMethod, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var readPairMethod, pairs, dict, _i, pairs_1, pair; + var _this = this; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + readPairMethod = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(_this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readMapPair(stream, readItemMethod, options)]; + case 1: return [2 /*return*/, _a.sent()]; + } + }); + }); + }; + return [4 /*yield*/, AvroParser.readArray(stream, readPairMethod, options)]; + case 1: + pairs = _a.sent(); + dict = {}; + for (_i = 0, pairs_1 = pairs; _i < pairs_1.length; _i++) { + pair = pairs_1[_i]; + dict[pair.key] = pair.value; + } + return [2 /*return*/, dict]; + } + }); + }); + }; + AvroParser.readArray = function (stream, readItemMethod, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var items, count, item; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + items = []; + return [4 /*yield*/, AvroParser.readLong(stream, options)]; + case 1: + count = _a.sent(); + _a.label = 2; + case 2: + if (!(count != 0)) return [3 /*break*/, 8]; + if (!(count < 0)) return [3 /*break*/, 4]; + // Ignore block sizes + return [4 /*yield*/, AvroParser.readLong(stream, options)]; + case 3: + // Ignore block sizes + _a.sent(); + count = -count; + _a.label = 4; + case 4: + if (!count--) return [3 /*break*/, 6]; + return [4 /*yield*/, readItemMethod(stream, options)]; + case 5: + item = _a.sent(); + items.push(item); + return [3 /*break*/, 4]; + case 6: return [4 /*yield*/, AvroParser.readLong(stream, options)]; + case 7: + count = _a.sent(); + return [3 /*break*/, 2]; + case 8: return [2 /*return*/, items]; + } + }); + }); + }; + return AvroParser; +}()); var AvroComplex; (function (AvroComplex) { AvroComplex["RECORD"] = "record"; @@ -32165,11 +33740,13 @@ var AvroComplex; AvroComplex["UNION"] = "union"; AvroComplex["FIXED"] = "fixed"; })(AvroComplex || (AvroComplex = {})); -class AvroType { +var AvroType = /** @class */ (function () { + function AvroType() { + } /** * Determines the AvroType from the Avro Schema. 
*/ - static fromSchema(schema) { + AvroType.fromSchema = function (schema) { if (typeof schema === "string") { return AvroType.fromStringSchema(schema); } @@ -32179,8 +33756,8 @@ class AvroType { else { return AvroType.fromObjectSchema(schema); } - } - static fromStringSchema(schema) { + }; + AvroType.fromStringSchema = function (schema) { switch (schema) { case AvroPrimitive.NULL: case AvroPrimitive.BOOLEAN: @@ -32192,14 +33769,14 @@ class AvroType { case AvroPrimitive.STRING: return new AvroPrimitiveType(schema); default: - throw new Error(`Unexpected Avro type ${schema}`); + throw new Error("Unexpected Avro type " + schema); } - } - static fromArraySchema(schema) { + }; + AvroType.fromArraySchema = function (schema) { return new AvroUnionType(schema.map(AvroType.fromSchema)); - } - static fromObjectSchema(schema) { - const type = schema.type; + }; + AvroType.fromObjectSchema = function (schema) { + var type = schema.type; // Primitives can be defined as strings or objects try { return AvroType.fromStringSchema(type); @@ -32208,39 +33785,41 @@ class AvroType { switch (type) { case AvroComplex.RECORD: if (schema.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema}`); + throw new Error("aliases currently is not supported, schema: " + schema); } if (!schema.name) { - throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + throw new Error("Required attribute 'name' doesn't exist on schema: " + schema); } - const fields = {}; + var fields = {}; if (!schema.fields) { - throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + throw new Error("Required attribute 'fields' doesn't exist on schema: " + schema); } - for (const field of schema.fields) { + for (var _i = 0, _a = schema.fields; _i < _a.length; _i++) { + var field = _a[_i]; fields[field.name] = AvroType.fromSchema(field.type); } return new AvroRecordType(fields, schema.name); case AvroComplex.ENUM: if (schema.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema}`); + throw new Error("aliases currently is not supported, schema: " + schema); } if (!schema.symbols) { - throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + throw new Error("Required attribute 'symbols' doesn't exist on schema: " + schema); } return new AvroEnumType(schema.symbols); case AvroComplex.MAP: if (!schema.values) { - throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + throw new Error("Required attribute 'values' doesn't exist on schema: " + schema); } return new AvroMapType(AvroType.fromSchema(schema.values)); case AvroComplex.ARRAY: // Unused today case AvroComplex.FIXED: // Unused today default: - throw new Error(`Unexpected Avro type ${type} in ${schema}`); + throw new Error("Unexpected Avro type " + type + " in " + schema); } - } -} + }; + return AvroType; +}()); var AvroPrimitive; (function (AvroPrimitive) { AvroPrimitive["NULL"] = "null"; @@ -32252,87 +33831,179 @@ var AvroPrimitive; AvroPrimitive["BYTES"] = "bytes"; AvroPrimitive["STRING"] = "string"; })(AvroPrimitive || (AvroPrimitive = {})); -class AvroPrimitiveType extends AvroType { - constructor(primitive) { - super(); - this._primitive = primitive; - } - async read(stream, options = {}) { - switch (this._primitive) { - case AvroPrimitive.NULL: - return await AvroParser.readNull(); - case AvroPrimitive.BOOLEAN: - return await AvroParser.readBoolean(stream, options); - case AvroPrimitive.INT: - return await 
AvroParser.readInt(stream, options); - case AvroPrimitive.LONG: - return await AvroParser.readLong(stream, options); - case AvroPrimitive.FLOAT: - return await AvroParser.readFloat(stream, options); - case AvroPrimitive.DOUBLE: - return await AvroParser.readDouble(stream, options); - case AvroPrimitive.BYTES: - return await AvroParser.readBytes(stream, options); - case AvroPrimitive.STRING: - return await AvroParser.readString(stream, options); - default: - throw new Error("Unknown Avro Primitive"); - } - } -} -class AvroEnumType extends AvroType { - constructor(symbols) { - super(); - this._symbols = symbols; - } - async read(stream, options = {}) { - const value = await AvroParser.readInt(stream, options); - return this._symbols[value]; - } -} -class AvroUnionType extends AvroType { - constructor(types) { - super(); - this._types = types; - } - async read(stream, options = {}) { - const typeIndex = await AvroParser.readInt(stream, options); - return await this._types[typeIndex].read(stream, options); +var AvroPrimitiveType = /** @class */ (function (_super) { + tslib.__extends(AvroPrimitiveType, _super); + function AvroPrimitiveType(primitive) { + var _this = _super.call(this) || this; + _this._primitive = primitive; + return _this; } -} -class AvroMapType extends AvroType { - constructor(itemType) { - super(); - this._itemType = itemType; + AvroPrimitiveType.prototype.read = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = this._primitive; + switch (_a) { + case AvroPrimitive.NULL: return [3 /*break*/, 1]; + case AvroPrimitive.BOOLEAN: return [3 /*break*/, 3]; + case AvroPrimitive.INT: return [3 /*break*/, 5]; + case AvroPrimitive.LONG: return [3 /*break*/, 7]; + case AvroPrimitive.FLOAT: return [3 /*break*/, 9]; + case AvroPrimitive.DOUBLE: return [3 /*break*/, 11]; + case AvroPrimitive.BYTES: return [3 /*break*/, 13]; + case AvroPrimitive.STRING: return [3 /*break*/, 15]; + } + return [3 /*break*/, 17]; + case 1: return [4 /*yield*/, AvroParser.readNull()]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: return [4 /*yield*/, AvroParser.readBoolean(stream, options)]; + case 4: return [2 /*return*/, _b.sent()]; + case 5: return [4 /*yield*/, AvroParser.readInt(stream, options)]; + case 6: return [2 /*return*/, _b.sent()]; + case 7: return [4 /*yield*/, AvroParser.readLong(stream, options)]; + case 8: return [2 /*return*/, _b.sent()]; + case 9: return [4 /*yield*/, AvroParser.readFloat(stream, options)]; + case 10: return [2 /*return*/, _b.sent()]; + case 11: return [4 /*yield*/, AvroParser.readDouble(stream, options)]; + case 12: return [2 /*return*/, _b.sent()]; + case 13: return [4 /*yield*/, AvroParser.readBytes(stream, options)]; + case 14: return [2 /*return*/, _b.sent()]; + case 15: return [4 /*yield*/, AvroParser.readString(stream, options)]; + case 16: return [2 /*return*/, _b.sent()]; + case 17: throw new Error("Unknown Avro Primitive"); + } + }); + }); + }; + return AvroPrimitiveType; +}(AvroType)); +var AvroEnumType = /** @class */ (function (_super) { + tslib.__extends(AvroEnumType, _super); + function AvroEnumType(symbols) { + var _this = _super.call(this) || this; + _this._symbols = symbols; + return _this; } - async read(stream, options = {}) { - const readItemMethod = async (s, options) => { - return await this._itemType.read(s, options); - }; - return await AvroParser.readMap(stream, 
readItemMethod, options); + AvroEnumType.prototype.read = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var value; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readInt(stream, options)]; + case 1: + value = _a.sent(); + return [2 /*return*/, this._symbols[value]]; + } + }); + }); + }; + return AvroEnumType; +}(AvroType)); +var AvroUnionType = /** @class */ (function (_super) { + tslib.__extends(AvroUnionType, _super); + function AvroUnionType(types) { + var _this = _super.call(this) || this; + _this._types = types; + return _this; } -} -class AvroRecordType extends AvroType { - constructor(fields, name) { - super(); - this._fields = fields; - this._name = name; + AvroUnionType.prototype.read = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var typeIndex; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readInt(stream, options)]; + case 1: + typeIndex = _a.sent(); + return [4 /*yield*/, this._types[typeIndex].read(stream, options)]; + case 2: return [2 /*return*/, _a.sent()]; + } + }); + }); + }; + return AvroUnionType; +}(AvroType)); +var AvroMapType = /** @class */ (function (_super) { + tslib.__extends(AvroMapType, _super); + function AvroMapType(itemType) { + var _this = _super.call(this) || this; + _this._itemType = itemType; + return _this; } - async read(stream, options = {}) { - const record = {}; - record["$schema"] = this._name; - for (const key in this._fields) { - if (this._fields.hasOwnProperty(key)) { - record[key] = await this._fields[key].read(stream, options); - } - } - return record; + AvroMapType.prototype.read = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var readItemMethod; + var _this = this; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + readItemMethod = function (s, options) { return tslib.__awaiter(_this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this._itemType.read(s, options)]; + case 1: return [2 /*return*/, _a.sent()]; + } + }); + }); }; + return [4 /*yield*/, AvroParser.readMap(stream, readItemMethod, options)]; + case 1: return [2 /*return*/, _a.sent()]; + } + }); + }); + }; + return AvroMapType; +}(AvroType)); +var AvroRecordType = /** @class */ (function (_super) { + tslib.__extends(AvroRecordType, _super); + function AvroRecordType(fields, name) { + var _this = _super.call(this) || this; + _this._fields = fields; + _this._name = name; + return _this; } -} + AvroRecordType.prototype.read = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var record, _a, _b, _i, key, _c, _d; + return tslib.__generator(this, function (_e) { + switch (_e.label) { + case 0: + record = {}; + record["$schema"] = this._name; + _a = []; + for (_b in this._fields) + _a.push(_b); + _i = 0; + _e.label = 1; + case 1: + if (!(_i < _a.length)) return [3 /*break*/, 4]; + key = _a[_i]; + if (!this._fields.hasOwnProperty(key)) return [3 /*break*/, 3]; + _c = record; + _d = key; + return [4 /*yield*/, this._fields[key].read(stream, options)]; + case 2: + _c[_d] = _e.sent(); + _e.label = 
3; + case 3: + _i++; + return [3 /*break*/, 1]; + case 4: return [2 /*return*/, record]; + } + }); + }); + }; + return AvroRecordType; +}(AvroType)); // Copyright (c) Microsoft Corporation. -class AvroReader { - constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { +var AvroReader = /** @class */ (function () { + function AvroReader(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { this._dataStream = dataStream; this._headerStream = headerStream || dataStream; this._initialized = false; @@ -32340,178 +34011,261 @@ class AvroReader { this._objectIndex = indexWithinCurrentBlock || 0; this._initialBlockOffset = currentBlockOffset || 0; } - get blockOffset() { - return this._blockOffset; - } - get objectIndex() { - return this._objectIndex; - } - async initialize(options = {}) { - const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { - abortSignal: options.abortSignal - }); - if (!arraysEqual(header, AVRO_INIT_BYTES)) { - throw new Error("Stream is not an Avro file."); - } - // File metadata is written as if defined by the following map schema: - // { "type": "map", "values": "bytes"} - this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { - abortSignal: options.abortSignal - }); - // Validate codec - const codec = this._metadata[AVRO_CODEC_KEY]; - if (!(codec == undefined || codec == "null")) { - throw new Error("Codecs are not supported"); - } - // The 16-byte, randomly-generated sync marker for this file. - this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal - }); - // Parse the schema - const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); - this._itemType = AvroType.fromSchema(schema); - if (this._blockOffset == 0) { - this._blockOffset = this._initialBlockOffset + this._dataStream.position; - } - this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal + Object.defineProperty(AvroReader.prototype, "blockOffset", { + get: function () { + return this._blockOffset; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(AvroReader.prototype, "objectIndex", { + get: function () { + return this._objectIndex; + }, + enumerable: false, + configurable: true + }); + AvroReader.prototype.initialize = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var header, _a, codec, _b, schema, _c, i; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: return [4 /*yield*/, AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal + })]; + case 1: + header = _d.sent(); + if (!arraysEqual(header, AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); + } + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + _a = this; + return [4 /*yield*/, AvroParser.readMap(this._headerStream, AvroParser.readString, { + abortSignal: options.abortSignal + })]; + case 2: + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + _a._metadata = _d.sent(); + codec = this._metadata[AVRO_CODEC_KEY]; + if (!(codec == undefined || codec == "null")) { + throw new Error("Codecs are not supported"); + } + // The 16-byte, randomly-generated sync marker for this file. 
+ _b = this; + return [4 /*yield*/, AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal + })]; + case 3: + // The 16-byte, randomly-generated sync marker for this file. + _b._syncMarker = _d.sent(); + schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); + this._itemType = AvroType.fromSchema(schema); + if (this._blockOffset == 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + } + _c = this; + return [4 /*yield*/, AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal + })]; + case 4: + _c._itemsRemainingInBlock = _d.sent(); + // skip block length + return [4 /*yield*/, AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })]; + case 5: + // skip block length + _d.sent(); + this._initialized = true; + if (!(this._objectIndex && this._objectIndex > 0)) return [3 /*break*/, 9]; + i = 0; + _d.label = 6; + case 6: + if (!(i < this._objectIndex)) return [3 /*break*/, 9]; + return [4 /*yield*/, this._itemType.read(this._dataStream, { abortSignal: options.abortSignal })]; + case 7: + _d.sent(); + this._itemsRemainingInBlock--; + _d.label = 8; + case 8: + i++; + return [3 /*break*/, 6]; + case 9: return [2 /*return*/]; + } + }); }); - // skip block length - await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); - this._initialized = true; - if (this._objectIndex && this._objectIndex > 0) { - for (let i = 0; i < this._objectIndex; i++) { - await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); - this._itemsRemainingInBlock--; - } - } - } - hasNext() { + }; + AvroReader.prototype.hasNext = function () { return !this._initialized || this._itemsRemainingInBlock > 0; - } - parseObjects(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* parseObjects_1() { - if (!this._initialized) { - yield tslib.__await(this.initialize(options)); - } - while (this.hasNext()) { - const result = yield tslib.__await(this._itemType.read(this._dataStream, { - abortSignal: options.abortSignal - })); - this._itemsRemainingInBlock--; - this._objectIndex++; - if (this._itemsRemainingInBlock == 0) { - const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal - })); - this._blockOffset = this._initialBlockOffset + this._dataStream.position; - this._objectIndex = 0; - if (!arraysEqual(this._syncMarker, marker)) { - throw new Error("Stream is not a valid Avro file."); - } - try { - this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal - })); - } - catch (err) { + }; + AvroReader.prototype.parseObjects = function (options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function parseObjects_1() { + var result, marker, _a, err_1; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!!this._initialized) return [3 /*break*/, 2]; + return [4 /*yield*/, tslib.__await(this.initialize(options))]; + case 1: + _b.sent(); + _b.label = 2; + case 2: + if (!this.hasNext()) return [3 /*break*/, 13]; + return [4 /*yield*/, tslib.__await(this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal + }))]; + case 3: + result = _b.sent(); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (!(this._itemsRemainingInBlock == 0)) return [3 /*break*/, 10]; + return [4 /*yield*/, 
tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal + }))]; + case 4: + marker = _b.sent(); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!arraysEqual(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + _b.label = 5; + case 5: + _b.trys.push([5, 7, , 8]); + _a = this; + return [4 /*yield*/, tslib.__await(AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal + }))]; + case 6: + _a._itemsRemainingInBlock = _b.sent(); + return [3 /*break*/, 8]; + case 7: + err_1 = _b.sent(); // We hit the end of the stream. this._itemsRemainingInBlock = 0; - } - if (this._itemsRemainingInBlock > 0) { + return [3 /*break*/, 8]; + case 8: + if (!(this._itemsRemainingInBlock > 0)) return [3 /*break*/, 10]; // Ignore block size - yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); - } + return [4 /*yield*/, tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }))]; + case 9: + // Ignore block size + _b.sent(); + _b.label = 10; + case 10: return [4 /*yield*/, tslib.__await(result)]; + case 11: return [4 /*yield*/, _b.sent()]; + case 12: + _b.sent(); + return [3 /*break*/, 2]; + case 13: return [2 /*return*/]; } - yield yield tslib.__await(result); - } + }); }); - } -} + }; + return AvroReader; +}()); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -class AvroReadable { -} +var AvroReadable = /** @class */ (function () { + function AvroReadable() { + } + return AvroReadable; +}()); // Copyright (c) Microsoft Corporation. -const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); -class AvroReadableFromStream extends AvroReadable { - constructor(readable) { - super(); - this._readable = readable; - this._position = 0; +var ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); +var AvroReadableFromStream = /** @class */ (function (_super) { + tslib.__extends(AvroReadableFromStream, _super); + function AvroReadableFromStream(readable) { + var _this = _super.call(this) || this; + _this._readable = readable; + _this._position = 0; + return _this; } - toUint8Array(data) { + AvroReadableFromStream.prototype.toUint8Array = function (data) { if (typeof data === "string") { return Buffer.from(data); } return data; - } - get position() { - return this._position; - } - async read(size, options = {}) { + }; + Object.defineProperty(AvroReadableFromStream.prototype, "position", { + get: function () { + return this._position; + }, + enumerable: false, + configurable: true + }); + AvroReadableFromStream.prototype.read = function (size, options) { var _a; - if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { - throw ABORT_ERROR; - } - if (size < 0) { - throw new Error(`size parameter should be positive: ${size}`); - } - if (size === 0) { - return new Uint8Array(); - } - if (!this._readable.readable) { - throw new Error("Stream no longer readable."); - } - // See if there is already enough data. - const chunk = this._readable.read(size); - if (chunk) { - this._position += chunk.length; - // chunk.length maybe less than desired size if the stream ends. 
- return this.toUint8Array(chunk); - } - else { - // register callback to wait for enough data to read - return new Promise((resolve, reject) => { - const cleanUp = () => { - this._readable.removeListener("readable", readableCallback); - this._readable.removeListener("error", rejectCallback); - this._readable.removeListener("end", rejectCallback); - this._readable.removeListener("close", rejectCallback); - if (options.abortSignal) { - options.abortSignal.removeEventListener("abort", abortHandler); - } - }; - const readableCallback = () => { - const chunk = this._readable.read(size); - if (chunk) { - this._position += chunk.length; - cleanUp(); - // chunk.length maybe less than desired size if the stream ends. - resolve(this.toUint8Array(chunk)); - } - }; - const rejectCallback = () => { - cleanUp(); - reject(); - }; - const abortHandler = () => { - cleanUp(); - reject(ABORT_ERROR); - }; - this._readable.on("readable", readableCallback); - this._readable.once("error", rejectCallback); - this._readable.once("end", rejectCallback); - this._readable.once("close", rejectCallback); - if (options.abortSignal) { - options.abortSignal.addEventListener("abort", abortHandler); + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var chunk; + var _this = this; + return tslib.__generator(this, function (_b) { + if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw ABORT_ERROR; + } + if (size < 0) { + throw new Error("size parameter should be positive: " + size); + } + if (size === 0) { + return [2 /*return*/, new Uint8Array()]; + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + // chunk.length maybe less than desired size if the stream ends. + return [2 /*return*/, this.toUint8Array(chunk)]; + } + else { + // register callback to wait for enough data to read + return [2 /*return*/, new Promise(function (resolve, reject) { + var cleanUp = function () { + _this._readable.removeListener("readable", readableCallback); + _this._readable.removeListener("error", rejectCallback); + _this._readable.removeListener("end", rejectCallback); + _this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + var readableCallback = function () { + var chunk = _this._readable.read(size); + if (chunk) { + _this._position += chunk.length; + cleanUp(); + // chunk.length maybe less than desired size if the stream ends. + resolve(_this.toUint8Array(chunk)); + } + }; + var rejectCallback = function () { + cleanUp(); + reject(); + }; + var abortHandler = function () { + cleanUp(); + reject(ABORT_ERROR); + }; + _this._readable.on("readable", readableCallback); + _this._readable.once("error", rejectCallback); + _this._readable.once("end", rejectCallback); + _this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + })]; } }); - } - } -} + }); + }; + return AvroReadableFromStream; +}(AvroReadable)); // Copyright (c) Microsoft Corporation. /** @@ -32519,103 +34273,121 @@ class AvroReadableFromStream extends AvroReadable { * * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. 
*/ -class BlobQuickQueryStream extends stream.Readable { +var BlobQuickQueryStream = /** @class */ (function (_super) { + tslib.__extends(BlobQuickQueryStream, _super); /** * Creates an instance of BlobQuickQueryStream. * * @param source - The current ReadableStream returned from getter * @param options - */ - constructor(source, options = {}) { - super(); - this.avroPaused = true; - this.source = source; - this.onProgress = options.onProgress; - this.onError = options.onError; - this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); - this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); - } - _read() { + function BlobQuickQueryStream(source, options) { + if (options === void 0) { options = {}; } + var _this = _super.call(this) || this; + _this.avroPaused = true; + _this.source = source; + _this.onProgress = options.onProgress; + _this.onError = options.onError; + _this.avroReader = new AvroReader(new AvroReadableFromStream(_this.source)); + _this.avroIter = _this.avroReader.parseObjects({ abortSignal: options.abortSignal }); + return _this; + } + BlobQuickQueryStream.prototype._read = function () { + var _this = this; if (this.avroPaused) { - this.readInternal().catch((err) => { - this.emit("error", err); + this.readInternal().catch(function (err) { + _this.emit("error", err); }); } - } - async readInternal() { - this.avroPaused = false; - let avroNext; - do { - avroNext = await this.avroIter.next(); - if (avroNext.done) { - break; - } - const obj = avroNext.value; - const schema = obj.$schema; - if (typeof schema !== "string") { - throw Error("Missing schema in avro record."); - } - switch (schema) { - case "com.microsoft.azure.storage.queryBlobContents.resultData": - const data = obj.data; - if (data instanceof Uint8Array === false) { - throw Error("Invalid data in avro result record."); - } - if (!this.push(Buffer.from(data))) { - this.avroPaused = true; - } - break; - case "com.microsoft.azure.storage.queryBlobContents.progress": - const bytesScanned = obj.bytesScanned; - if (typeof bytesScanned !== "number") { - throw Error("Invalid bytesScanned in avro progress record."); - } - if (this.onProgress) { - this.onProgress({ loadedBytes: bytesScanned }); - } - break; - case "com.microsoft.azure.storage.queryBlobContents.end": - if (this.onProgress) { - const totalBytes = obj.totalBytes; - if (typeof totalBytes !== "number") { - throw Error("Invalid totalBytes in avro end record."); - } - this.onProgress({ loadedBytes: totalBytes }); - } - this.push(null); - break; - case "com.microsoft.azure.storage.queryBlobContents.error": - if (this.onError) { - const fatal = obj.fatal; - if (typeof fatal !== "boolean") { - throw Error("Invalid fatal in avro error record."); - } - const name = obj.name; - if (typeof name !== "string") { - throw Error("Invalid name in avro error record."); + }; + BlobQuickQueryStream.prototype.readInternal = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + var avroNext, obj, schema, data, bytesScanned, totalBytes, fatal, name_1, description, position; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + this.avroPaused = false; + _a.label = 1; + case 1: return [4 /*yield*/, this.avroIter.next()]; + case 2: + avroNext = _a.sent(); + if (avroNext.done) { + return [3 /*break*/, 4]; } - const description = obj.description; - if (typeof description !== "string") { - throw Error("Invalid description in avro error record."); + obj = avroNext.value; + schema = 
obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); } - const position = obj.position; - if (typeof position !== "number") { - throw Error("Invalid position in avro error record."); + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + name_1 = obj.name; + if (typeof name_1 !== "string") { + throw Error("Invalid name in avro error record."); + } + description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position: position, + name: name_1, + isFatal: fatal, + description: description + }); + } + break; + default: + throw Error("Unknown schema " + schema + " in avro progress record."); } - this.onError({ - position, - name, - isFatal: fatal, - description - }); - } - break; - default: - throw Error(`Unknown schema ${schema} in avro progress record.`); - } - } while (!avroNext.done && !this.avroPaused); - } -} + _a.label = 3; + case 3: + if (!avroNext.done && !this.avroPaused) return [3 /*break*/, 1]; + _a.label = 4; + case 4: return [2 /*return*/]; + } + }); + }); + }; + return BlobQuickQueryStream; +}(stream.Readable)); // Copyright (c) Microsoft Corporation. /** @@ -32624,362 +34396,508 @@ class BlobQuickQueryStream extends stream.Readable { * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will * parse avor data returned by blob query. */ -class BlobQueryResponse { +var BlobQueryResponse = /** @class */ (function () { /** * Creates an instance of BlobQueryResponse. * * @param originalResponse - * @param options - */ - constructor(originalResponse, options = {}) { + function BlobQueryResponse(originalResponse, options) { + if (options === void 0) { options = {}; } this.originalResponse = originalResponse; this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); } - /** - * Indicates that the service supports - * requests for partial file content. - * - * @readonly - */ - get acceptRanges() { - return this.originalResponse.acceptRanges; - } - /** - * Returns if it was previously specified - * for the file. 
- * - * @readonly - */ - get cacheControl() { - return this.originalResponse.cacheControl; - } - /** - * Returns the value that was specified - * for the 'x-ms-content-disposition' header and specifies how to process the - * response. - * - * @readonly - */ - get contentDisposition() { - return this.originalResponse.contentDisposition; - } - /** - * Returns the value that was specified - * for the Content-Encoding request header. - * - * @readonly - */ - get contentEncoding() { - return this.originalResponse.contentEncoding; - } - /** - * Returns the value that was specified - * for the Content-Language request header. - * - * @readonly - */ - get contentLanguage() { - return this.originalResponse.contentLanguage; - } - /** - * The current sequence number for a - * page blob. This header is not returned for block blobs or append blobs. - * - * @readonly - */ - get blobSequenceNumber() { - return this.originalResponse.blobSequenceNumber; - } - /** - * The blob's type. Possible values include: - * 'BlockBlob', 'PageBlob', 'AppendBlob'. - * - * @readonly - */ - get blobType() { - return this.originalResponse.blobType; - } - /** - * The number of bytes present in the - * response body. - * - * @readonly - */ - get contentLength() { - return this.originalResponse.contentLength; - } - /** - * If the file has an MD5 hash and the - * request is to read the full file, this response header is returned so that - * the client can check for message content integrity. If the request is to - * read a specified range and the 'x-ms-range-get-content-md5' is set to - * true, then the request returns an MD5 hash for the range, as long as the - * range size is less than or equal to 4 MB. If neither of these sets of - * conditions is true, then no value is returned for the 'Content-MD5' - * header. - * - * @readonly - */ - get contentMD5() { - return this.originalResponse.contentMD5; - } - /** - * Indicates the range of bytes returned if - * the client requested a subset of the file by setting the Range request - * header. - * - * @readonly - */ - get contentRange() { - return this.originalResponse.contentRange; - } - /** - * The content type specified for the file. - * The default content type is 'application/octet-stream' - * - * @readonly - */ - get contentType() { - return this.originalResponse.contentType; - } - /** - * Conclusion time of the last attempted - * Copy File operation where this file was the destination file. This value - * can specify the time of a completed, aborted, or failed copy attempt. - * - * @readonly - */ - get copyCompletedOn() { - return undefined; - } - /** - * String identifier for the last attempted Copy - * File operation where this file was the destination file. - * - * @readonly - */ - get copyId() { - return this.originalResponse.copyId; - } - /** - * Contains the number of bytes copied and - * the total bytes in the source in the last attempted Copy File operation - * where this file was the destination file. Can show between 0 and - * Content-Length bytes copied. - * - * @readonly - */ - get copyProgress() { - return this.originalResponse.copyProgress; - } - /** - * URL up to 2KB in length that specifies the - * source file used in the last attempted Copy File operation where this file - * was the destination file. - * - * @readonly - */ - get copySource() { - return this.originalResponse.copySource; - } - /** - * State of the copy operation - * identified by 'x-ms-copy-id'. 
Possible values include: 'pending', - * 'success', 'aborted', 'failed' - * - * @readonly - */ - get copyStatus() { - return this.originalResponse.copyStatus; - } - /** - * Only appears when - * x-ms-copy-status is failed or pending. Describes cause of fatal or - * non-fatal copy operation failure. - * - * @readonly - */ - get copyStatusDescription() { - return this.originalResponse.copyStatusDescription; - } - /** - * When a blob is leased, - * specifies whether the lease is of infinite or fixed duration. Possible - * values include: 'infinite', 'fixed'. - * - * @readonly - */ - get leaseDuration() { - return this.originalResponse.leaseDuration; - } - /** - * Lease state of the blob. Possible - * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. - * - * @readonly - */ - get leaseState() { - return this.originalResponse.leaseState; - } - /** - * The current lease status of the - * blob. Possible values include: 'locked', 'unlocked'. - * - * @readonly - */ - get leaseStatus() { - return this.originalResponse.leaseStatus; - } - /** - * A UTC date/time value generated by the service that - * indicates the time at which the response was initiated. - * - * @readonly - */ - get date() { - return this.originalResponse.date; - } - /** - * The number of committed blocks - * present in the blob. This header is returned only for append blobs. - * - * @readonly - */ - get blobCommittedBlockCount() { - return this.originalResponse.blobCommittedBlockCount; - } - /** - * The ETag contains a value that you can use to - * perform operations conditionally, in quotes. - * - * @readonly - */ - get etag() { - return this.originalResponse.etag; - } - /** - * The error code. - * - * @readonly - */ - get errorCode() { - return this.originalResponse.errorCode; - } - /** - * The value of this header is set to - * true if the file data and application metadata are completely encrypted - * using the specified algorithm. Otherwise, the value is set to false (when - * the file is unencrypted, or if only parts of the file/application metadata - * are encrypted). - * - * @readonly - */ - get isServerEncrypted() { - return this.originalResponse.isServerEncrypted; - } - /** - * If the blob has a MD5 hash, and if - * request contains range header (Range or x-ms-range), this response header - * is returned with the value of the whole blob's MD5 value. This value may - * or may not be equal to the value returned in Content-MD5 header, with the - * latter calculated from the requested range. - * - * @readonly - */ - get blobContentMD5() { - return this.originalResponse.blobContentMD5; - } - /** - * Returns the date and time the file was last - * modified. Any operation that modifies the file or its properties updates - * the last modified time. - * - * @readonly - */ - get lastModified() { - return this.originalResponse.lastModified; - } - /** - * A name-value pair - * to associate with a file storage object. - * - * @readonly - */ - get metadata() { - return this.originalResponse.metadata; - } - /** - * This header uniquely identifies the request - * that was made and can be used for troubleshooting the request. - * - * @readonly - */ - get requestId() { - return this.originalResponse.requestId; - } - /** - * If a client request id header is sent in the request, this header will be present in the - * response with the same value. 
- * - * @readonly - */ - get clientRequestId() { - return this.originalResponse.clientRequestId; - } - /** - * Indicates the version of the File service used - * to execute the request. - * - * @readonly - */ - get version() { - return this.originalResponse.version; - } - /** - * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned - * when the blob was encrypted with a customer-provided key. - * - * @readonly - */ - get encryptionKeySha256() { - return this.originalResponse.encryptionKeySha256; - } - /** - * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to - * true, then the request returns a crc64 for the range, as long as the range size is less than - * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is - * specified in the same request, it will fail with 400(Bad Request) - */ - get contentCrc64() { - return this.originalResponse.contentCrc64; - } - /** - * The response body as a browser Blob. - * Always undefined in node.js. - * - * @readonly - */ - get blobBody() { - return undefined; - } - /** - * The response body as a node.js Readable stream. - * Always undefined in the browser. - * - * It will parse avor data returned by blob query. - * - * @readonly - */ - get readableStreamBody() { - return coreHttp.isNode ? this.blobDownloadStream : undefined; - } - /** - * The HTTP response. - */ - get _response() { - return this.originalResponse._response; - } -} + Object.defineProperty(BlobQueryResponse.prototype, "acceptRanges", { + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get: function () { + return this.originalResponse.acceptRanges; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "cacheControl", { + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get: function () { + return this.originalResponse.cacheControl; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentDisposition", { + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentDisposition; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentEncoding", { + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentEncoding; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentLanguage", { + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentLanguage; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "blobSequenceNumber", { + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobSequenceNumber; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "blobType", { + /** + * The blob's type. 
Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobType; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentLength", { + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentLength; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentMD5", { + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentMD5; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentRange", { + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentRange; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentType", { + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get: function () { + return this.originalResponse.contentType; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "copyCompletedOn", { + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get: function () { + return undefined; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "copyId", { + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get: function () { + return this.originalResponse.copyId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "copyProgress", { + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get: function () { + return this.originalResponse.copyProgress; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "copySource", { + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get: function () { + return this.originalResponse.copySource; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "copyStatus", { + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. 
Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get: function () { + return this.originalResponse.copyStatus; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "copyStatusDescription", { + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get: function () { + return this.originalResponse.copyStatusDescription; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "leaseDuration", { + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get: function () { + return this.originalResponse.leaseDuration; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "leaseState", { + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get: function () { + return this.originalResponse.leaseState; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "leaseStatus", { + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get: function () { + return this.originalResponse.leaseStatus; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "date", { + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get: function () { + return this.originalResponse.date; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "blobCommittedBlockCount", { + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobCommittedBlockCount; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "etag", { + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get: function () { + return this.originalResponse.etag; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "errorCode", { + /** + * The error code. + * + * @readonly + */ + get: function () { + return this.originalResponse.errorCode; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "isServerEncrypted", { + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). 
+ * + * @readonly + */ + get: function () { + return this.originalResponse.isServerEncrypted; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "blobContentMD5", { + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobContentMD5; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "lastModified", { + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get: function () { + return this.originalResponse.lastModified; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "metadata", { + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get: function () { + return this.originalResponse.metadata; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "requestId", { + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get: function () { + return this.originalResponse.requestId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "clientRequestId", { + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get: function () { + return this.originalResponse.clientRequestId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "version", { + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get: function () { + return this.originalResponse.version; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "encryptionKeySha256", { + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get: function () { + return this.originalResponse.encryptionKeySha256; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentCrc64", { + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get: function () { + return this.originalResponse.contentCrc64; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "blobBody", { + /** + * The response body as a browser Blob. + * Always undefined in node.js. 
+ * + * @readonly + */ + get: function () { + return undefined; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "readableStreamBody", { + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avor data returned by blob query. + * + * @readonly + */ + get: function () { + return coreHttp.isNode ? this.blobDownloadStream : undefined; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "_response", { + /** + * The HTTP response. + */ + get: function () { + return this.originalResponse._response; + }, + enumerable: false, + configurable: true + }); + return BlobQueryResponse; +}()); // Copyright (c) Microsoft Corporation. (function (BlockBlobTier) { @@ -33059,7 +34977,6 @@ function ensureCpkIfSpecified(cpk, isHttps) { } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** * Function that converts PageRange and ClearRange to a common Range object. * PageRange and ClearRange have start and end while Range offset and count @@ -33067,18 +34984,18 @@ function ensureCpkIfSpecified(cpk, isHttps) { * @param response - Model PageBlob Range response */ function rangeResponseFromModel(response) { - const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + var pageRange = (response._response.parsedBody.pageRange || []).map(function (x) { return ({ offset: x.start, count: x.end - x.start - })); - const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + }); }); + var clearRange = (response._response.parsedBody.clearRange || []).map(function (x) { return ({ offset: x.start, count: x.end - x.start - })); - return Object.assign(Object.assign({}, response), { pageRange, - clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { - pageRange, - clearRange + }); }); + return tslib.__assign(tslib.__assign({}, response), { pageRange: pageRange, + clearRange: clearRange, _response: tslib.__assign(tslib.__assign({}, response._response), { parsedBody: { + pageRange: pageRange, + clearRange: clearRange } }) }); } @@ -33089,48 +35006,64 @@ function rangeResponseFromModel(response) { * * @hidden */ -class BlobBeginCopyFromUrlPoller extends coreLro.Poller { - constructor(options) { - const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions } = options; - let state; +var BlobBeginCopyFromUrlPoller = /** @class */ (function (_super) { + tslib.__extends(BlobBeginCopyFromUrlPoller, _super); + function BlobBeginCopyFromUrlPoller(options) { + var _this = this; + var blobClient = options.blobClient, copySource = options.copySource, _a = options.intervalInMs, intervalInMs = _a === void 0 ? 
15000 : _a, onProgress = options.onProgress, resumeFrom = options.resumeFrom, startCopyFromURLOptions = options.startCopyFromURLOptions; + var state; if (resumeFrom) { state = JSON.parse(resumeFrom).state; } - const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, - copySource, - startCopyFromURLOptions })); - super(operation); + var operation = makeBlobBeginCopyFromURLPollOperation(tslib.__assign(tslib.__assign({}, state), { blobClient: blobClient, + copySource: copySource, + startCopyFromURLOptions: startCopyFromURLOptions })); + _this = _super.call(this, operation) || this; if (typeof onProgress === "function") { - this.onProgress(onProgress); + _this.onProgress(onProgress); } - this.intervalInMs = intervalInMs; + _this.intervalInMs = intervalInMs; + return _this; } - delay() { + BlobBeginCopyFromUrlPoller.prototype.delay = function () { return coreHttp.delay(this.intervalInMs); - } -} + }; + return BlobBeginCopyFromUrlPoller; +}(coreLro.Poller)); /** * Note: Intentionally using function expression over arrow function expression * so that the function can be invoked with a different context. * This affects what `this` refers to. * @hidden */ -const cancel = async function cancel(options = {}) { - const state = this.state; - const { copyId } = state; - if (state.isCompleted) { - return makeBlobBeginCopyFromURLPollOperation(state); - } - if (!copyId) { - state.isCancelled = true; - return makeBlobBeginCopyFromURLPollOperation(state); - } - // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call - await state.blobClient.abortCopyFromURL(copyId, { - abortSignal: options.abortSignal +var cancel = function cancel(options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var state, copyId; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + state = this.state; + copyId = state.copyId; + if (state.isCompleted) { + return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)]; + } + if (!copyId) { + state.isCancelled = true; + return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)]; + } + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + return [4 /*yield*/, state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal + })]; + case 1: + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + _a.sent(); + state.isCancelled = true; + return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)]; + } + }); }); - state.isCancelled = true; - return makeBlobBeginCopyFromURLPollOperation(state); }; /** * Note: Intentionally using function expression over arrow function expression @@ -33138,48 +35071,64 @@ const cancel = async function cancel(options = {}) { * This affects what `this` refers to. 
* @hidden */ -const update = async function update(options = {}) { - const state = this.state; - const { blobClient, copySource, startCopyFromURLOptions } = state; - if (!state.isStarted) { - state.isStarted = true; - const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); - // copyId is needed to abort - state.copyId = result.copyId; - if (result.copyStatus === "success") { - state.result = result; - state.isCompleted = true; - } - } - else if (!state.isCompleted) { - try { - const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); - const { copyStatus, copyProgress } = result; - const prevCopyProgress = state.copyProgress; - if (copyProgress) { - state.copyProgress = copyProgress; - } - if (copyStatus === "pending" && - copyProgress !== prevCopyProgress && - typeof options.fireProgress === "function") { - // trigger in setTimeout, or swallow error? - options.fireProgress(state); - } - else if (copyStatus === "success") { - state.result = result; - state.isCompleted = true; - } - else if (copyStatus === "failed") { - state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); - state.isCompleted = true; +var update = function update(options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var state, blobClient, copySource, startCopyFromURLOptions, result, result, copyStatus, copyProgress, prevCopyProgress, err_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + state = this.state; + blobClient = state.blobClient, copySource = state.copySource, startCopyFromURLOptions = state.startCopyFromURLOptions; + if (!!state.isStarted) return [3 /*break*/, 2]; + state.isStarted = true; + return [4 /*yield*/, blobClient.startCopyFromURL(copySource, startCopyFromURLOptions)]; + case 1: + result = _a.sent(); + // copyId is needed to abort + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + return [3 /*break*/, 6]; + case 2: + if (!!state.isCompleted) return [3 /*break*/, 6]; + _a.label = 3; + case 3: + _a.trys.push([3, 5, , 6]); + return [4 /*yield*/, state.blobClient.getProperties({ abortSignal: options.abortSignal })]; + case 4: + result = _a.sent(); + copyStatus = result.copyStatus, copyProgress = result.copyProgress; + prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && + copyProgress !== prevCopyProgress && + typeof options.fireProgress === "function") { + // trigger in setTimeout, or swallow error? 
+ options.fireProgress(state); + } + else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + else if (copyStatus === "failed") { + state.error = new Error("Blob copy failed with reason: \"" + (result.copyStatusDescription || "unknown") + "\""); + state.isCompleted = true; + } + return [3 /*break*/, 6]; + case 5: + err_1 = _a.sent(); + state.error = err_1; + state.isCompleted = true; + return [3 /*break*/, 6]; + case 6: return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)]; } - } - catch (err) { - state.error = err; - state.isCompleted = true; - } - } - return makeBlobBeginCopyFromURLPollOperation(state); + }); + }); }; /** * Note: Intentionally using function expression over arrow function expression @@ -33187,8 +35136,8 @@ const update = async function update(options = {}) { * This affects what `this` refers to. * @hidden */ -const toString = function toString() { - return JSON.stringify({ state: this.state }, (key, value) => { +var toString = function toString() { + return JSON.stringify({ state: this.state }, function (key, value) { // remove blobClient from serialized state since a client can't be hydrated from this info. if (key === "blobClient") { return undefined; @@ -33202,10 +35151,10 @@ const toString = function toString() { */ function makeBlobBeginCopyFromURLPollOperation(state) { return { - state: Object.assign({}, state), - cancel, - toString, - update + state: tslib.__assign({}, state), + cancel: cancel, + toString: toString, + update: update }; } @@ -33220,14 +35169,14 @@ function makeBlobBeginCopyFromURLPollOperation(state) { */ function rangeToString(iRange) { if (iRange.offset < 0) { - throw new RangeError(`Range.offset cannot be smaller than 0.`); + throw new RangeError("Range.offset cannot be smaller than 0."); } if (iRange.count && iRange.count <= 0) { - throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + throw new RangeError("Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end."); } return iRange.count - ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` - : `bytes=${iRange.offset}-`; + ? "bytes=" + iRange.offset + "-" + (iRange.offset + iRange.count - 1) + : "bytes=" + iRange.offset + "-"; } // Copyright (c) Microsoft Corporation. @@ -33244,12 +35193,13 @@ var BatchStates; * Will stop execute left operations when one of the executed operation throws an error. * But Batch cannot cancel ongoing operations, you need to cancel them by yourself. */ -class Batch { +var Batch = /** @class */ (function () { /** * Creates an instance of Batch. * @param concurrency - */ - constructor(concurrency = 5) { + function Batch(concurrency) { + if (concurrency === void 0) { concurrency = 5; } /** * Number of active operations under execution. 
*/ @@ -33282,53 +35232,69 @@ class Batch { * * @param operation - */ - addOperation(operation) { - this.operations.push(async () => { - try { - this.actives++; - await operation(); - this.actives--; - this.completed++; - this.parallelExecute(); - } - catch (error) { - this.emitter.emit("error", error); - } - }); - } + Batch.prototype.addOperation = function (operation) { + var _this = this; + this.operations.push(function () { return tslib.__awaiter(_this, void 0, void 0, function () { + var error_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + _a.trys.push([0, 2, , 3]); + this.actives++; + return [4 /*yield*/, operation()]; + case 1: + _a.sent(); + this.actives--; + this.completed++; + this.parallelExecute(); + return [3 /*break*/, 3]; + case 2: + error_1 = _a.sent(); + this.emitter.emit("error", error_1); + return [3 /*break*/, 3]; + case 3: return [2 /*return*/]; + } + }); + }); }); + }; /** * Start execute operations in the queue. * */ - async do() { - if (this.operations.length === 0) { - return Promise.resolve(); - } - this.parallelExecute(); - return new Promise((resolve, reject) => { - this.emitter.on("finish", resolve); - this.emitter.on("error", (error) => { - this.state = BatchStates.Error; - reject(error); + Batch.prototype.do = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + if (this.operations.length === 0) { + return [2 /*return*/, Promise.resolve()]; + } + this.parallelExecute(); + return [2 /*return*/, new Promise(function (resolve, reject) { + _this.emitter.on("finish", resolve); + _this.emitter.on("error", function (error) { + _this.state = BatchStates.Error; + reject(error); + }); + })]; }); }); - } + }; /** * Get next operation to be executed. Return null when reaching ends. * */ - nextOperation() { + Batch.prototype.nextOperation = function () { if (this.offset < this.operations.length) { return this.operations[this.offset++]; } return null; - } + }; /** * Start execute operations. One one the most important difference between * this method with do() is that do() wraps as an sync method. * */ - parallelExecute() { + Batch.prototype.parallelExecute = function () { if (this.state === BatchStates.Error) { return; } @@ -33337,7 +35303,7 @@ class Batch { return; } while (this.actives < this.concurrency) { - const operation = this.nextOperation(); + var operation = this.nextOperation(); if (operation) { operation(); } @@ -33345,14 +35311,16 @@ class Batch { return; } } - } -} + }; + return Batch; +}()); // Copyright (c) Microsoft Corporation. /** * This class generates a readable stream from the data in an array of buffers. */ -class BuffersStream extends stream.Readable { +var BuffersStream = /** @class */ (function (_super) { + tslib.__extends(BuffersStream, _super); /** * Creates an instance of BuffersStream that will emit the data * contained in the array of buffers. 
@@ -33360,44 +35328,46 @@ class BuffersStream extends stream.Readable { * @param buffers - Array of buffers containing the data * @param byteLength - The total length of data contained in the buffers */ - constructor(buffers, byteLength, options) { - super(options); - this.buffers = buffers; - this.byteLength = byteLength; - this.byteOffsetInCurrentBuffer = 0; - this.bufferIndex = 0; - this.pushedBytesLength = 0; + function BuffersStream(buffers, byteLength, options) { + var _this = _super.call(this, options) || this; + _this.buffers = buffers; + _this.byteLength = byteLength; + _this.byteOffsetInCurrentBuffer = 0; + _this.bufferIndex = 0; + _this.pushedBytesLength = 0; // check byteLength is no larger than buffers[] total length - let buffersLength = 0; - for (const buf of this.buffers) { + var buffersLength = 0; + for (var _i = 0, _a = _this.buffers; _i < _a.length; _i++) { + var buf = _a[_i]; buffersLength += buf.byteLength; } - if (buffersLength < this.byteLength) { + if (buffersLength < _this.byteLength) { throw new Error("Data size shouldn't be larger than the total length of buffers."); } + return _this; } /** * Internal _read() that will be called when the stream wants to pull more data in. * * @param size - Optional. The size of data to be read */ - _read(size) { + BuffersStream.prototype._read = function (size) { if (this.pushedBytesLength >= this.byteLength) { this.push(null); } if (!size) { size = this.readableHighWaterMark; } - const outBuffers = []; - let i = 0; + var outBuffers = []; + var i = 0; while (i < size && this.pushedBytesLength < this.byteLength) { // The last buffer may be longer than the data it contains. - const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; - const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; - const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + var remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + var remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + var remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); if (remaining > size - i) { // chunkSize = size - i - const end = this.byteOffsetInCurrentBuffer + size - i; + var end = this.byteOffsetInCurrentBuffer + size - i; outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); this.pushedBytesLength += size - i; this.byteOffsetInCurrentBuffer = end; @@ -33406,7 +35376,7 @@ class BuffersStream extends stream.Readable { } else { // chunkSize = remaining - const end = this.byteOffsetInCurrentBuffer + remaining; + var end = this.byteOffsetInCurrentBuffer + remaining; outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); if (remaining === remainingCapacityInThisBuffer) { // this.buffers[this.bufferIndex] used up, shift to next one @@ -33426,15 +35396,16 @@ class BuffersStream extends stream.Readable { else if (outBuffers.length === 1) { this.push(outBuffers[0]); } - } -} + }; + return BuffersStream; +}(stream.Readable)); // Copyright (c) Microsoft Corporation. /** * maxBufferLength is max size of each buffer in the pooled buffers. */ // Can't use import as Typescript doesn't recognize "buffer". 
-const maxBufferLength = __webpack_require__(407).constants.MAX_LENGTH; +var maxBufferLength = __webpack_require__(407).constants.MAX_LENGTH; /** * This class provides a buffer container which conceptually has no hard size limit. * It accepts a capacity, an array of input buffers and the total length of input data. @@ -33443,8 +35414,8 @@ const maxBufferLength = __webpack_require__(407).constants.MAX_LENGTH; * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream * assembled from all the data in the internal "buffer". */ -class PooledBuffer { - constructor(capacity, buffers, totalLength) { +var PooledBuffer = /** @class */ (function () { + function PooledBuffer(capacity, buffers, totalLength) { /** * Internal buffers used to keep the data. * Each buffer has a length of the maxBufferLength except last one. @@ -33453,9 +35424,9 @@ class PooledBuffer { this.capacity = capacity; this._size = 0; // allocate - const bufferNum = Math.ceil(capacity / maxBufferLength); - for (let i = 0; i < bufferNum; i++) { - let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; + var bufferNum = Math.ceil(capacity / maxBufferLength); + for (var i = 0; i < bufferNum; i++) { + var len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; if (len === 0) { len = maxBufferLength; } @@ -33465,12 +35436,16 @@ class PooledBuffer { this.fill(buffers, totalLength); } } - /** - * The size of the data contained in the pooled buffers. - */ - get size() { - return this._size; - } + Object.defineProperty(PooledBuffer.prototype, "size", { + /** + * The size of the data contained in the pooled buffers. + */ + get: function () { + return this._size; + }, + enumerable: false, + configurable: true + }); /** * Fill the internal buffers with data in the input buffers serially * with respect to the total length and the total capacity of the internal buffers. @@ -33480,13 +35455,13 @@ class PooledBuffer { * @param totalLength - Total length of the data to be filled in. * */ - fill(buffers, totalLength) { + PooledBuffer.prototype.fill = function (buffers, totalLength) { this._size = Math.min(this.capacity, totalLength); - let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + var i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; while (totalCopiedNum < this._size) { - const source = buffers[i]; - const target = this.buffers[j]; - const copiedNum = source.copy(target, targetOffset, sourceOffset); + var source = buffers[i]; + var target = this.buffers[j]; + var copiedNum = source.copy(target, targetOffset, sourceOffset); totalCopiedNum += copiedNum; sourceOffset += copiedNum; targetOffset += copiedNum; @@ -33504,15 +35479,16 @@ class PooledBuffer { if (buffers.length > 0) { buffers[0] = buffers[0].slice(sourceOffset); } - } + }; /** * Get the readable stream assembled from all the data in the internal buffers. * */ - getReadableStream() { + PooledBuffer.prototype.getReadableStream = function () { return new BuffersStream(this.buffers, this.size); - } -} + }; + return PooledBuffer; +}()); // Copyright (c) Microsoft Corporation. /** @@ -33537,7 +35513,7 @@ class PooledBuffer { * in this situation, outgoing handlers are blocked. * Outgoing queue shouldn't be empty. */ -class BufferScheduler { +var BufferScheduler = /** @class */ (function () { /** * Creates an instance of BufferScheduler. 
* @@ -33550,7 +35526,7 @@ class BufferScheduler { * @param concurrency - Concurrency of executing outgoingHandlers (>0) * @param encoding - [Optional] Encoding of Readable stream when it's a string stream */ - constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + function BufferScheduler(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { /** * An internal event emitter. */ @@ -33596,13 +35572,13 @@ class BufferScheduler { */ this.outgoing = []; if (bufferSize <= 0) { - throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + throw new RangeError("bufferSize must be larger than 0, current is " + bufferSize); } if (maxBuffers <= 0) { - throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + throw new RangeError("maxBuffers must be larger than 0, current is " + maxBuffers); } if (concurrency <= 0) { - throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + throw new RangeError("concurrency must be larger than 0, current is " + concurrency); } this.bufferSize = bufferSize; this.maxBuffers = maxBuffers; @@ -33616,64 +35592,69 @@ class BufferScheduler { * returns error. * */ - async do() { - return new Promise((resolve, reject) => { - this.readable.on("data", (data) => { - data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; - this.appendUnresolvedData(data); - if (!this.resolveData()) { - this.readable.pause(); - } - }); - this.readable.on("error", (err) => { - this.emitter.emit("error", err); - }); - this.readable.on("end", () => { - this.isStreamEnd = true; - this.emitter.emit("checkEnd"); - }); - this.emitter.on("error", (err) => { - this.isError = true; - this.readable.pause(); - reject(err); - }); - this.emitter.on("checkEnd", () => { - if (this.outgoing.length > 0) { - this.triggerOutgoingHandlers(); - return; - } - if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { - if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { - const buffer = this.shiftBufferFromUnresolvedDataArray(); - this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) - .then(resolve) - .catch(reject); - } - else if (this.unresolvedLength >= this.bufferSize) { - return; - } - else { - resolve(); - } - } + BufferScheduler.prototype.do = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, new Promise(function (resolve, reject) { + _this.readable.on("data", function (data) { + data = typeof data === "string" ? 
Buffer.from(data, _this.encoding) : data; + _this.appendUnresolvedData(data); + if (!_this.resolveData()) { + _this.readable.pause(); + } + }); + _this.readable.on("error", function (err) { + _this.emitter.emit("error", err); + }); + _this.readable.on("end", function () { + _this.isStreamEnd = true; + _this.emitter.emit("checkEnd"); + }); + _this.emitter.on("error", function (err) { + _this.isError = true; + _this.readable.pause(); + reject(err); + }); + _this.emitter.on("checkEnd", function () { + if (_this.outgoing.length > 0) { + _this.triggerOutgoingHandlers(); + return; + } + if (_this.isStreamEnd && _this.executingOutgoingHandlers === 0) { + if (_this.unresolvedLength > 0 && _this.unresolvedLength < _this.bufferSize) { + var buffer_1 = _this.shiftBufferFromUnresolvedDataArray(); + _this.outgoingHandler(function () { return buffer_1.getReadableStream(); }, buffer_1.size, _this.offset) + .then(resolve) + .catch(reject); + } + else if (_this.unresolvedLength >= _this.bufferSize) { + return; + } + else { + resolve(); + } + } + }); + })]; }); }); - } + }; /** * Insert a new data into unresolved array. * * @param data - */ - appendUnresolvedData(data) { + BufferScheduler.prototype.appendUnresolvedData = function (data) { this.unresolvedDataArray.push(data); this.unresolvedLength += data.length; - } + }; /** * Try to shift a buffer with size in blockSize. The buffer returned may be less * than blockSize when data in unresolvedDataArray is less than bufferSize. * */ - shiftBufferFromUnresolvedDataArray(buffer) { + BufferScheduler.prototype.shiftBufferFromUnresolvedDataArray = function (buffer) { if (!buffer) { buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); } @@ -33682,7 +35663,7 @@ class BufferScheduler { } this.unresolvedLength -= buffer.size; return buffer; - } + }; /** * Resolve data in unresolvedDataArray. For every buffer with size in blockSize * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, @@ -33692,9 +35673,9 @@ class BufferScheduler { * * @returns Return false when buffers in incoming are not enough, else true. */ - resolveData() { + BufferScheduler.prototype.resolveData = function () { while (this.unresolvedLength >= this.bufferSize) { - let buffer; + var buffer = void 0; if (this.incoming.length > 0) { buffer = this.incoming.shift(); this.shiftBufferFromUnresolvedDataArray(buffer); @@ -33713,55 +35694,75 @@ class BufferScheduler { this.triggerOutgoingHandlers(); } return true; - } + }; /** * Try to trigger a outgoing handler for every buffer in outgoing. Stop when * concurrency reaches. */ - async triggerOutgoingHandlers() { - let buffer; - do { - if (this.executingOutgoingHandlers >= this.concurrency) { - return; - } - buffer = this.outgoing.shift(); - if (buffer) { - this.triggerOutgoingHandler(buffer); - } - } while (buffer); - } + BufferScheduler.prototype.triggerOutgoingHandlers = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + var buffer; + return tslib.__generator(this, function (_a) { + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return [2 /*return*/]; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + return [2 /*return*/]; + }); + }); + }; /** * Trigger a outgoing handler for a buffer shifted from outgoing. 
* * @param buffer - */ - async triggerOutgoingHandler(buffer) { - const bufferLength = buffer.size; - this.executingOutgoingHandlers++; - this.offset += bufferLength; - try { - await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); - } - catch (err) { - this.emitter.emit("error", err); - return; - } - this.executingOutgoingHandlers--; - this.reuseBuffer(buffer); - this.emitter.emit("checkEnd"); - } + BufferScheduler.prototype.triggerOutgoingHandler = function (buffer) { + return tslib.__awaiter(this, void 0, void 0, function () { + var bufferLength, err_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, this.outgoingHandler(function () { return buffer.getReadableStream(); }, bufferLength, this.offset - bufferLength)]; + case 2: + _a.sent(); + return [3 /*break*/, 4]; + case 3: + err_1 = _a.sent(); + this.emitter.emit("error", err_1); + return [2 /*return*/]; + case 4: + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + return [2 /*return*/]; + } + }); + }); + }; /** * Return buffer used by outgoing handler into incoming. * * @param buffer - */ - reuseBuffer(buffer) { + BufferScheduler.prototype.reuseBuffer = function (buffer) { this.incoming.push(buffer); if (!this.isError && this.resolveData() && !this.isStreamEnd) { this.readable.resume(); } - } -} + }; + return BufferScheduler; +}()); // Copyright (c) Microsoft Corporation. /** @@ -33773,34 +35774,39 @@ class BufferScheduler { * @param end - To which position in the buffer to be filled, exclusive * @param encoding - Encoding of the Readable stream */ -async function streamToBuffer(stream, buffer, offset, end, encoding) { - let pos = 0; // Position in stream - const count = end - offset; // Total amount of data needed in stream - return new Promise((resolve, reject) => { - stream.on("readable", () => { - if (pos >= count) { - resolve(); - return; - } - let chunk = stream.read(); - if (!chunk) { - return; - } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); - } - // How much data needed in this chunk - const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; - buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); - pos += chunkLength; - }); - stream.on("end", () => { - if (pos < count) { - reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); - } - resolve(); +function streamToBuffer(stream, buffer, offset, end, encoding) { + return tslib.__awaiter(this, void 0, void 0, function () { + var pos, count; + return tslib.__generator(this, function (_a) { + pos = 0; + count = end - offset; + return [2 /*return*/, new Promise(function (resolve, reject) { + stream.on("readable", function () { + if (pos >= count) { + resolve(); + return; + } + var chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + // How much data needed in this chunk + var chunkLength = pos + chunk.length > count ? 
count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream.on("end", function () { + if (pos < count) { + reject(new Error("Stream drains before getting enough data needed. Data read: " + pos + ", data need: " + count)); + } + resolve(); + }); + stream.on("error", reject); + })]; }); - stream.on("error", reject); }); } /** @@ -33812,29 +35818,34 @@ async function streamToBuffer(stream, buffer, offset, end, encoding) { * @returns with the count of bytes read. * @throws `RangeError` If buffer size is not big enough. */ -async function streamToBuffer2(stream, buffer, encoding) { - let pos = 0; // Position in stream - const bufferSize = buffer.length; - return new Promise((resolve, reject) => { - stream.on("readable", () => { - let chunk = stream.read(); - if (!chunk) { - return; - } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); - } - if (pos + chunk.length > bufferSize) { - reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); - return; - } - buffer.fill(chunk, pos, pos + chunk.length); - pos += chunk.length; - }); - stream.on("end", () => { - resolve(pos); +function streamToBuffer2(stream, buffer, encoding) { + return tslib.__awaiter(this, void 0, void 0, function () { + var pos, bufferSize; + return tslib.__generator(this, function (_a) { + pos = 0; + bufferSize = buffer.length; + return [2 /*return*/, new Promise(function (resolve, reject) { + stream.on("readable", function () { + var chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error("Stream exceeds buffer size. Buffer size: " + bufferSize)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream.on("end", function () { + resolve(pos); + }); + stream.on("error", reject); + })]; }); - stream.on("error", reject); }); } /** @@ -33845,17 +35856,21 @@ async function streamToBuffer2(stream, buffer, encoding) { * @param rs - The read stream. * @param file - Destination file path. */ -async function readStreamToLocalFile(rs, file) { - return new Promise((resolve, reject) => { - const ws = fs.createWriteStream(file); - rs.on("error", (err) => { - reject(err); - }); - ws.on("error", (err) => { - reject(err); +function readStreamToLocalFile(rs, file) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, new Promise(function (resolve, reject) { + var ws = fs.createWriteStream(file); + rs.on("error", function (err) { + reject(err); + }); + ws.on("error", function (err) { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + })]; }); - ws.on("close", resolve); - rs.pipe(ws); }); } /** @@ -33863,18 +35878,21 @@ async function readStreamToLocalFile(rs, file) { * * Promisified version of fs.stat(). */ -const fsStat = util.promisify(fs.stat); -const fsCreateReadStream = fs.createReadStream; +var fsStat = util.promisify(fs.stat); +var fsCreateReadStream = fs.createReadStream; /** * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, * append blob, or page blob. 
*/ -class BlobClient extends StorageClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { +var BlobClient = /** @class */ (function (_super) { + tslib.__extends(BlobClient, _super); + function BlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + var _a; + var _this = this; options = options || {}; - let pipeline; - let url; + var pipeline; + var url; if (credentialOrPipelineOrContainerName instanceof Pipeline) { // (url: string, pipeline: Pipeline) url = urlOrConnectionString; @@ -33900,12 +35918,12 @@ class BlobClient extends StorageClient { blobNameOrOptions && typeof blobNameOrOptions === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + var containerName = credentialOrPipelineOrContainerName; + var blobName = blobNameOrOptions; + var extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); @@ -33925,27 +35943,33 @@ class BlobClient extends StorageClient { else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url, pipeline); - ({ - blobName: this._name, - containerName: this._containerName - } = this.getBlobAndContainerNamesFromUrl()); - this.blobContext = new Blob$1(this.storageClientContext); - this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); - this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); - } - /** - * The name of the blob. - */ - get name() { - return this._name; - } - /** - * The name of the storage container the blob is associated with. - */ - get containerName() { - return this._containerName; + _this = _super.call(this, url, pipeline) || this; + (_a = _this.getBlobAndContainerNamesFromUrl(), _this._name = _a.blobName, _this._containerName = _a.containerName); + _this.blobContext = new Blob$1(_this.storageClientContext); + _this._snapshot = getURLParameter(_this.url, URLConstants.Parameters.SNAPSHOT); + _this._versionId = getURLParameter(_this.url, URLConstants.Parameters.VERSIONID); + return _this; } + Object.defineProperty(BlobClient.prototype, "name", { + /** + * The name of the blob. + */ + get: function () { + return this._name; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobClient.prototype, "containerName", { + /** + * The name of the storage container the blob is associated with. + */ + get: function () { + return this._containerName; + }, + enumerable: false, + configurable: true + }); /** * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. * Provide "" will remove the snapshot and return a Client to the base blob. 
@@ -33953,9 +35977,9 @@ class BlobClient extends StorageClient { * @param snapshot - The snapshot timestamp. * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp */ - withSnapshot(snapshot) { + BlobClient.prototype.withSnapshot = function (snapshot) { return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); - } + }; /** * Creates a new BlobClient object pointing to a version of this blob. * Provide "" will remove the versionId and return a Client to the base blob. @@ -33963,30 +35987,30 @@ class BlobClient extends StorageClient { * @param versionId - The versionId. * @returns A new BlobClient object pointing to the version of this blob. */ - withVersion(versionId) { + BlobClient.prototype.withVersion = function (versionId) { return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); - } + }; /** * Creates a AppendBlobClient object. * */ - getAppendBlobClient() { + BlobClient.prototype.getAppendBlobClient = function () { return new AppendBlobClient(this.url, this.pipeline); - } + }; /** * Creates a BlockBlobClient object. * */ - getBlockBlobClient() { + BlobClient.prototype.getBlockBlobClient = function () { return new BlockBlobClient(this.url, this.pipeline); - } + }; /** * Creates a PageBlobClient object. * */ - getPageBlobClient() { + BlobClient.prototype.getPageBlobClient = function () { return new PageBlobClient(this.url, this.pipeline); - } + }; /** * Reads or downloads a blob from the system, including its metadata and properties. * You can also call Get Blob to read a snapshot. @@ -34046,77 +36070,98 @@ class BlobClient extends StorageClient { * } * ``` */ - async download(offset = 0, count, options = {}) { + BlobClient.prototype.download = function (offset, count, options) { var _a; - options.conditions = options.conditions || {}; - options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const { span, updatedOptions } = createSpan("BlobClient-download", options); - try { - const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress // for Node.js, progress is reported by RetriableReadableStream - }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); - // Return browser response immediately - if (false) {} - // We support retrying when download stream unexpected ends in Node.js runtime - // Following code shouldn't be bundled into browser build, however some - // bundlers may try to bundle following code and "FileReadResponse.ts". 
- // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" - // The config is in package.json "browser" field - if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { - // TODO: Default value or make it a required parameter? - options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; - } - if (res.contentLength === undefined) { - throw new RangeError(`File download response doesn't contain valid content length header`); - } - if (!res.etag) { - throw new RangeError(`File download response doesn't contain valid etag header`); - } - return new BlobDownloadResponse(wrappedRes, async (start) => { - var _a; - const updatedOptions = { - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ifMatch: options.conditions.ifMatch || res.etag, - ifModifiedSince: options.conditions.ifModifiedSince, - ifNoneMatch: options.conditions.ifNoneMatch, - ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, - ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions - }, - range: rangeToString({ - count: offset + res.contentLength - start, - offset: start - }), - rangeGetContentMD5: options.rangeGetContentMD5, - rangeGetContentCRC64: options.rangeGetContentCrc64, - snapshot: options.snapshot, - cpkInfo: options.customerProvidedKey - }; - // Debug purpose only - // console.log( - // `Read from internal stream, range: ${ - // updatedOptions.range - // }, options: ${JSON.stringify(updatedOptions)}` - // ); - return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedOptions))).readableStreamBody; - }, offset, res.contentLength, { - maxRetryRequests: options.maxRetryRequests, - onProgress: options.onProgress - }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (offset === void 0) { offset = 0; } + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, res_1, wrappedRes, e_1; + var _this = this; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + _b = createSpan("BlobClient-download", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.download(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress // for Node.js, progress is reported by RetriableReadableStream + }, range: offset === 0 && !count ? 
undefined : rangeToString({ offset: offset, count: count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + res_1 = _c.sent(); + wrappedRes = tslib.__assign(tslib.__assign({}, res_1), { _response: res_1._response, objectReplicationDestinationPolicyId: res_1.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res_1.objectReplicationRules) }); + // We support retrying when download stream unexpected ends in Node.js runtime + // Following code shouldn't be bundled into browser build, however some + // bundlers may try to bundle following code and "FileReadResponse.ts". + // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" + // The config is in package.json "browser" field + if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { + // TODO: Default value or make it a required parameter? + options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res_1.contentLength === undefined) { + throw new RangeError("File download response doesn't contain valid content length header"); + } + if (!res_1.etag) { + throw new RangeError("File download response doesn't contain valid etag header"); + } + return [2 /*return*/, new BlobDownloadResponse(wrappedRes, function (start) { return tslib.__awaiter(_this, void 0, void 0, function () { + var updatedOptions; + var _a; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + updatedOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res_1.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions + }, + range: rangeToString({ + count: offset + res_1.contentLength - start, + offset: start + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey + }; + return [4 /*yield*/, this.blobContext.download(tslib.__assign({ abortSignal: options.abortSignal }, updatedOptions))]; + case 1: + // Debug purpose only + // console.log( + // `Read from internal stream, range: ${ + // updatedOptions.range + // }, options: ${JSON.stringify(updatedOptions)}` + // ); + return [2 /*return*/, (_b.sent()).readableStreamBody]; + } + }); + }); }, offset, res_1.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress + })]; + case 3: + e_1 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_1.message + }); + throw e_1; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns true if the Azure blob resource represented by this client exists; false otherwise. * @@ -34126,36 +36171,49 @@ class BlobClient extends StorageClient { * * @param options - options to Exists operation. 
*/ - async exists(options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-exists", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - await this.getProperties({ - abortSignal: options.abortSignal, - customerProvidedKey: options.customerProvidedKey, - conditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions - }); - return true; - } - catch (e) { - if (e.statusCode === 404) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking blob existence" - }); - return false; - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobClient.prototype.exists = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_2; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobClient-exists", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions + })]; + case 2: + _b.sent(); + return [2 /*return*/, true]; + case 3: + e_2 = _b.sent(); + if (e_2.statusCode === 404) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when checking blob existence" + }); + return [2 /*return*/, false]; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_2.message + }); + throw e_2; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns all user-defined metadata, standard HTTP properties, and system properties * for the blob. It does not return the content of the blob. @@ -34168,26 +36226,39 @@ class BlobClient extends StorageClient { * * @param options - Optional options to Get Properties operation. */ - async getProperties(options = {}) { + BlobClient.prototype.getProperties = function (options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); - try { - options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); - return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, res, e_3; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-getProperties", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blobContext.getProperties(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + res = _c.sent(); + return [2 /*return*/, tslib.__assign(tslib.__assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) })]; + case 3: + e_3 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_3.message + }); + throw e_3; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Marks the specified blob or snapshot for deletion. The blob is later deleted * during garbage collection. Note that in order to delete a blob, you must delete @@ -34197,24 +36268,36 @@ class BlobClient extends StorageClient { * * @param options - Optional options to Blob Delete operation. */ - async delete(options = {}) { + BlobClient.prototype.delete = function (options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-delete", options); - options.conditions = options.conditions || {}; - try { - return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_4; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-delete", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.delete(tslib.__assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_4 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_4.message + }); + throw e_4; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted * during garbage collection. Note that in order to delete a blob, you must delete @@ -34224,32 +36307,45 @@ class BlobClient extends StorageClient { * * @param options - Optional options to Blob Delete operation. */ - async deleteIfExists(options = {}) { + BlobClient.prototype.deleteIfExists = function (options) { var _a, _b; - const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); - try { - const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a blob or snapshot only if it exists." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _c, span, updatedOptions, res, e_5; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _c = createSpan("BlobClient-deleteIfExists", options), span = _c.span, updatedOptions = _c.updatedOptions; + _d.label = 1; + case 1: + _d.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.delete(updatedOptions)]; + case 2: + res = _d.sent(); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable + })]; + case 3: + e_5 = _d.sent(); + if (((_a = e_5.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a blob or snapshot only if it exists." 
+ }); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_5.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_5.response })]; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_5.message + }); + throw e_5; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Restores the contents and metadata of soft deleted blob and any associated * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 @@ -34258,22 +36354,34 @@ class BlobClient extends StorageClient { * * @param options - Optional options to Blob Undelete operation. */ - async undelete(options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-undelete", options); - try { - return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobClient.prototype.undelete = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_6; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobClient-undelete", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.undelete(tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_6 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_6.message + }); + throw e_6; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets system properties on the blob. * @@ -34286,25 +36394,37 @@ class BlobClient extends StorageClient { * headers without a value will be cleared. * @param options - Optional options to Blob Set HTTP Headers operation. */ - async setHTTPHeaders(blobHTTPHeaders, options = {}) { + BlobClient.prototype.setHTTPHeaders = function (blobHTTPHeaders, options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_7; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-setHTTPHeaders", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blobContext.setHttpHeaders(tslib.__assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_7 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_7.message + }); + throw e_7; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets user-defined metadata for the specified blob as one or more name-value pairs. * @@ -34316,25 +36436,37 @@ class BlobClient extends StorageClient { * If no value provided the existing metadata will be removed. * @param options - Optional options to Set Metadata operation. */ - async setMetadata(metadata, options = {}) { + BlobClient.prototype.setMetadata = function (metadata, options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_8; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-setMetadata", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blobContext.setMetadata(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_8 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_8.message + }); + throw e_8; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets tags on the underlying blob. * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. @@ -34344,81 +36476,118 @@ class BlobClient extends StorageClient { * @param tags - * @param options - */ - async setTags(tags, options = {}) { + BlobClient.prototype.setTags = function (tags, options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-setTags", options); - try { - return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) })); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_9; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-setTags", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.setTags(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) }))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_9 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_9.message + }); + throw e_9; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Gets the tags associated with the underlying blob. * * @param options - */ - async getTags(options = {}) { + BlobClient.prototype.getTags = function (options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-getTags", options); - try { - const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); - return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, response, wrappedResponse, e_10; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-getTags", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.getTags(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _c.sent(); + wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + return [2 /*return*/, wrappedResponse]; + case 3: + e_10 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_10.message + }); + throw e_10; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Get a {@link BlobLeaseClient} that manages leases on the blob. * * @param proposeLeaseId - Initial proposed lease Id. * @returns A new BlobLeaseClient object for managing leases on the blob. */ - getBlobLeaseClient(proposeLeaseId) { + BlobClient.prototype.getBlobLeaseClient = function (proposeLeaseId) { return new BlobLeaseClient(this, proposeLeaseId); - } + }; /** * Creates a read-only snapshot of a blob. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob * * @param options - Optional options to the Blob Create Snapshot operation. */ - async createSnapshot(options = {}) { + BlobClient.prototype.createSnapshot = function (options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_11; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-createSnapshot", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blobContext.createSnapshot(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_11 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_11.message + }); + throw e_11; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Asynchronously copies a blob to a destination within the storage account. * This method returns a long running operation poller that allows you to wait @@ -34491,25 +36660,57 @@ class BlobClient extends StorageClient { * @param copySource - url to the source Azure Blob/File. * @param options - Optional options to the Blob Start Copy From URL operation. 
*/ - async beginCopyFromURL(copySource, options = {}) { - const client = { - abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), - getProperties: (...args) => this.getProperties(...args), - startCopyFromURL: (...args) => this.startCopyFromURL(...args) - }; - const poller = new BlobBeginCopyFromUrlPoller({ - blobClient: client, - copySource, - intervalInMs: options.intervalInMs, - onProgress: options.onProgress, - resumeFrom: options.resumeFrom, - startCopyFromURLOptions: options + BlobClient.prototype.beginCopyFromURL = function (copySource, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var client, poller; + var _this = this; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + client = { + abortCopyFromURL: function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return _this.abortCopyFromURL.apply(_this, args); + }, + getProperties: function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return _this.getProperties.apply(_this, args); + }, + startCopyFromURL: function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return _this.startCopyFromURL.apply(_this, args); + } + }; + poller = new BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource: copySource, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options + }); + // Trigger the startCopyFromURL call by calling poll. + // Any errors from this method should be surfaced to the user. + return [4 /*yield*/, poller.poll()]; + case 1: + // Trigger the startCopyFromURL call by calling poll. + // Any errors from this method should be surfaced to the user. + _a.sent(); + return [2 /*return*/, poller]; + } + }); }); - // Trigger the startCopyFromURL call by calling poll. - // Any errors from this method should be surfaced to the user. - await poller.poll(); - return poller; - } + }; /** * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero * length and full metadata. Version 2012-02-12 and newer. @@ -34518,22 +36719,34 @@ class BlobClient extends StorageClient { * @param copyId - Id of the Copy From URL operation. * @param options - Optional options to the Blob Abort Copy From URL operation. 
*/ - async abortCopyFromURL(copyId, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); - try { - return await this.blobContext.abortCopyFromURL(copyId, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobClient.prototype.abortCopyFromURL = function (copyId, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_12; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobClient-abortCopyFromURL", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.abortCopyFromURL(copyId, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_12 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_12.message + }); + throw e_12; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not * return a response until the copy is complete. @@ -34542,30 +36755,42 @@ class BlobClient extends StorageClient { * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication * @param options - */ - async syncCopyFromURL(copySource, options = {}) { + BlobClient.prototype.syncCopyFromURL = function (copySource, options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - try { - return await this.blobContext.copyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, sourceContentMD5: options.sourceContentMD5, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_13; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-syncCopyFromURL", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.copyFromURL(copySource, tslib.__assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + }, sourceContentMD5: options.sourceContentMD5, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_13 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_13.message + }); + throw e_13; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets the tier on a blob. The operation is allowed on a page blob in a premium * storage account and on a block blob in a blob storage account (locally redundant @@ -34577,119 +36802,159 @@ class BlobClient extends StorageClient { * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. * @param options - Optional options to the Blob Set Tier operation. */ - async setAccessTier(tier, options = {}) { + BlobClient.prototype.setAccessTier = function (tier, options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); - try { - return await this.blobContext.setTier(toAccessTier(tier), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } - finally { - span.end(); - } - } - async downloadToBuffer(param1, param2, param3, param4 = {}) { - let buffer; - let offset = 0; - let count = 0; - let options = param4; - if (param1 instanceof Buffer) { - buffer = param1; - offset = param2 || 0; - count = typeof param3 === "number" ? param3 : 0; - } - else { - offset = typeof param1 === "number" ? param1 : 0; - count = typeof param2 === "number" ? param2 : 0; - options = param3 || {}; - } - const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); - try { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0) { - throw new RangeError("blockSize option must be >= 0"); - } - if (options.blockSize === 0) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } - if (offset < 0) { - throw new RangeError("offset option must be >= 0"); - } - if (count && count <= 0) { - throw new RangeError("count option must be greater than 0"); - } - if (!options.conditions) { - options.conditions = {}; - } - // Customer doesn't specify length, get it - if (!count) { - const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - count = response.contentLength - offset; - if (count < 0) { - throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); - } - } - // Allocate the buffer of size = count if the buffer is not provided - if (!buffer) { - try { - buffer = Buffer.alloc(count); + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_14; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-setAccessTier", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.setTier(toAccessTier(tier), tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_14 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_14.message + }); + throw e_14; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; } - catch (error) { - throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). 
Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); + }); + }); + }; + BlobClient.prototype.downloadToBuffer = function (param1, param2, param3, param4) { + if (param4 === void 0) { param4 = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var buffer, offset, count, options, _a, span, updatedOptions, response, transferProgress_1, batch, _loop_1, off, e_15; + var _this = this; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + offset = 0; + count = 0; + options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? param3 : 0; + } + else { + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? param2 : 0; + options = param3 || {}; + } + _a = createSpan("BlobClient-downloadToBuffer", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 5, 6, 7]); + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); + } + if (options.blockSize === 0) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + if (!!count) return [3 /*break*/, 3]; + return [4 /*yield*/, this.getProperties(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }))]; + case 2: + response = _b.sent(); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError("offset " + offset + " shouldn't be larger than blob size " + response.contentLength); + } + _b.label = 3; + case 3: + // Allocate the buffer of size = count if the buffer is not provided + if (!buffer) { + try { + buffer = Buffer.alloc(count); + } + catch (error) { + throw new Error("Unable to allocate the buffer of size: " + count + "(in bytes). 
Please try passing your own buffer to the \"downloadToBuffer\" method or try using other methods like \"download\" or \"downloadToFile\".\t " + error.message); + } + } + if (buffer.length < count) { + throw new RangeError("The buffer's size should be equal to or larger than the request count of bytes: " + count); + } + transferProgress_1 = 0; + batch = new Batch(options.concurrency); + _loop_1 = function (off) { + batch.addOperation(function () { return tslib.__awaiter(_this, void 0, void 0, function () { + var chunkEnd, response, stream; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + chunkEnd = offset + count; + if (off + options.blockSize < chunkEnd) { + chunkEnd = off + options.blockSize; + } + return [4 /*yield*/, this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) + })]; + case 1: + response = _a.sent(); + stream = response.readableStreamBody; + return [4 /*yield*/, streamToBuffer(stream, buffer, off - offset, chunkEnd - offset)]; + case 2: + _a.sent(); + // Update progress after block is downloaded, in case of block trying + // Could provide finer grained progress updating inside HTTP requests, + // only if convenience layer download try is enabled + transferProgress_1 += chunkEnd - off; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress_1 }); + } + return [2 /*return*/]; + } + }); + }); }); + }; + for (off = offset; off < offset + count; off = off + options.blockSize) { + _loop_1(off); + } + return [4 /*yield*/, batch.do()]; + case 4: + _b.sent(); + return [2 /*return*/, buffer]; + case 5: + e_15 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_15.message + }); + throw e_15; + case 6: + span.end(); + return [7 /*endfinally*/]; + case 7: return [2 /*return*/]; } - } - if (buffer.length < count) { - throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); - } - let transferProgress = 0; - const batch = new Batch(options.concurrency); - for (let off = offset; off < offset + count; off = off + options.blockSize) { - batch.addOperation(async () => { - // Exclusive chunk end position - let chunkEnd = offset + count; - if (off + options.blockSize < chunkEnd) { - chunkEnd = off + options.blockSize; - } - const response = await this.download(off, chunkEnd - off, { - abortSignal: options.abortSignal, - conditions: options.conditions, - maxRetryRequests: options.maxRetryRequestsPerBlock, - customerProvidedKey: options.customerProvidedKey, - tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) - }); - const stream = response.readableStreamBody; - await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); - // Update progress after block is downloaded, in case of block trying - // Could provide finer grained progress updating inside HTTP requests, - // only if convenience layer download try is enabled - transferProgress += chunkEnd - off; - if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); - } - }); - } - await batch.do(); - return buffer; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message 
}); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -34706,31 +36971,48 @@ class BlobClient extends StorageClient { * content is already read and written into a local file * at the specified path. */ - async downloadToFile(filePath, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); - try { - const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - if (response.readableStreamBody) { - await readStreamToLocalFile(response.readableStreamBody, filePath); - } - // The stream is no longer accessible so setting it to undefined. - response.blobDownloadStream = undefined; - return response; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobClient.prototype.downloadToFile = function (filePath, offset, count, options) { + if (offset === void 0) { offset = 0; } + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, response, e_16; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobClient-downloadToFile", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 5, 6, 7]); + return [4 /*yield*/, this.download(offset, count, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }))]; + case 2: + response = _b.sent(); + if (!response.readableStreamBody) return [3 /*break*/, 4]; + return [4 /*yield*/, readStreamToLocalFile(response.readableStreamBody, filePath)]; + case 3: + _b.sent(); + _b.label = 4; + case 4: + // The stream is no longer accessible so setting it to undefined. + response.blobDownloadStream = undefined; + return [2 /*return*/, response]; + case 5: + e_16 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_16.message + }); + throw e_16; + case 6: + span.end(); + return [7 /*endfinally*/]; + case 7: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } - getBlobAndContainerNamesFromUrl() { - let containerName; - let blobName; + }); + }; + BlobClient.prototype.getBlobAndContainerNamesFromUrl = function () { + var containerName; + var blobName; try { // URL may look like the following // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; @@ -34739,11 +37021,11 @@ class BlobClient extends StorageClient { // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` // http://localhost:10001/devstoreaccount1/containername/blob - const parsedUrl = coreHttp.URLBuilder.parse(this.url); + var parsedUrl = coreHttp.URLBuilder.parse(this.url); if (parsedUrl.getHost().split(".")[1] === "blob") { // "https://myaccount.blob.core.windows.net/containername/blob". 
// .getPath() -> /containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + var pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); containerName = pathComponents[1]; blobName = pathComponents[3]; } @@ -34751,14 +37033,14 @@ class BlobClient extends StorageClient { // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob // .getPath() -> /devstoreaccount1/containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); + var pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); containerName = pathComponents[2]; blobName = pathComponents[4]; } else { // "https://customdomain.com/containername/blob". // .getPath() -> /containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + var pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); containerName = pathComponents[1]; blobName = pathComponents[3]; } @@ -34771,12 +37053,12 @@ class BlobClient extends StorageClient { if (!containerName) { throw new Error("Provided containerName is invalid."); } - return { blobName, containerName }; + return { blobName: blobName, containerName: containerName }; } catch (error) { throw new Error("Unable to extract blobName and containerName with provided information."); } - } + }; /** * Asynchronously copies a blob to a destination within the storage account. * In version 2012-02-12 and later, the source for a Copy Blob operation can be @@ -34790,31 +37072,43 @@ class BlobClient extends StorageClient { * @param copySource - url to the source Azure Blob/File. * @param options - Optional options to the Blob Start Copy From URL operation. */ - async startCopyFromURL(copySource, options = {}) { + BlobClient.prototype.startCopyFromURL = function (copySource, options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - try { - return await this.blobContext.startCopyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - sourceIfTags: options.sourceConditions.tagConditions - }, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_17; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-startCopyFromURL", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.startCopyFromURL(copySource, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions + }, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_17 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_17.message + }); + throw e_17; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Only available for BlobClient constructed with a shared key credential. * @@ -34826,25 +37120,29 @@ class BlobClient extends StorageClient { * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
*/ - generateSasUrl(options) { - return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + BlobClient.prototype.generateSasUrl = function (options) { + var _this = this; + return new Promise(function (resolve) { + if (!(_this.credential instanceof StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); + var sas = generateBlobSASQueryParameters(tslib.__assign({ containerName: _this._containerName, blobName: _this._name, snapshotTime: _this._snapshot, versionId: _this._versionId }, options), _this.credential).toString(); + resolve(appendToURLQuery(_this.url, sas)); }); - } -} + }; + return BlobClient; +}(StorageClient)); /** * AppendBlobClient defines a set of operations applicable to append blobs. */ -class AppendBlobClient extends BlobClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { +var AppendBlobClient = /** @class */ (function (_super) { + tslib.__extends(AppendBlobClient, _super); + function AppendBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + var _this = this; // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); - let pipeline; - let url; + var pipeline; + var url; options = options || {}; if (credentialOrPipelineOrContainerName instanceof Pipeline) { // (url: string, pipeline: Pipeline) @@ -34871,12 +37169,12 @@ class AppendBlobClient extends BlobClient { blobNameOrOptions && typeof blobNameOrOptions === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + var containerName = credentialOrPipelineOrContainerName; + var blobName = blobNameOrOptions; + var extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); @@ -34896,8 +37194,9 @@ class AppendBlobClient extends BlobClient { else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url, pipeline); - this.appendBlobContext = new AppendBlob(this.storageClientContext); + _this = _super.call(this, url, pipeline) || this; + _this.appendBlobContext = new AppendBlob(_this.storageClientContext); + return _this; } /** * Creates a new AppendBlobClient object identical to the source but with the @@ -34907,9 +37206,9 @@ class AppendBlobClient extends 
BlobClient { * @param snapshot - The snapshot timestamp. * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. */ - withSnapshot(snapshot) { + AppendBlobClient.prototype.withSnapshot = function (snapshot) { return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); - } + }; /** * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. * @see https://docs.microsoft.com/rest/api/storageservices/put-blob @@ -34924,25 +37223,37 @@ class AppendBlobClient extends BlobClient { * await appendBlobClient.create(); * ``` */ - async create(options = {}) { + AppendBlobClient.prototype.create = function (options) { var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_18; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("AppendBlobClient-create", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.appendBlobContext.create(0, tslib.__assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_18 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_18.message + }); + throw e_18; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. * If the blob with the same name already exists, the content of the existing blob will remain unchanged. 
@@ -34950,56 +37261,81 @@ class AppendBlobClient extends BlobClient { * * @param options - */ - async createIfNotExists(options = {}) { + AppendBlobClient.prototype.createIfNotExists = function (options) { var _a, _b; - const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); - const conditions = { ifNoneMatch: ETagAny }; - try { - const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _c, span, updatedOptions, conditions, res, e_19; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _c = createSpan("AppendBlobClient-createIfNotExists", options), span = _c.span, updatedOptions = _c.updatedOptions; + conditions = { ifNoneMatch: ETagAny }; + _d.label = 1; + case 1: + _d.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.create(tslib.__assign(tslib.__assign({}, updatedOptions), { conditions: conditions }))]; + case 2: + res = _d.sent(); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable + })]; + case 3: + e_19 = _d.sent(); + if (((_a = e_19.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist." + }); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_19.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_19.response })]; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_19.message + }); + throw e_19; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Seals the append blob, making it read only. * * @param options - */ - async seal(options = {}) { + AppendBlobClient.prototype.seal = function (options) { var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); - options.conditions = options.conditions || {}; - try { - return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_20; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("AppendBlobClient-seal", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.appendBlobContext.seal(tslib.__assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_20 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_20.message + }); + throw e_20; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Commits a new block of data to the end of the existing append blob. * @see https://docs.microsoft.com/rest/api/storageservices/append-block @@ -35024,27 +37360,39 @@ class AppendBlobClient extends BlobClient { * await existingAppendBlobClient.appendBlock(content, content.length); * ``` */ - async appendBlock(body, contentLength, options = {}) { + AppendBlobClient.prototype.appendBlock = function (body, contentLength, options) { var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_21; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("AppendBlobClient-appendBlock", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.appendBlobContext.appendBlock(contentLength, body, tslib.__assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_21 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_21.message + }); + throw e_21; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * The Append Block operation commits a new block of data to the end of an existing append blob * where the contents are read from a source url. @@ -35059,41 +37407,56 @@ class AppendBlobClient extends BlobClient { * @param count - Number of bytes to be appended as a block * @param options - */ - async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + AppendBlobClient.prototype.appendBlockFromURL = function (sourceURL, sourceOffset, count, options) { var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_22; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("AppendBlobClient-appendBlockFromURL", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, tslib.__assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count: count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_22 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_22.message + }); + throw e_22; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } -} + }); + }; + return AppendBlobClient; +}(BlobClient)); /** * BlockBlobClient defines a set of operations applicable to block blobs. */ -class BlockBlobClient extends BlobClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { +var BlockBlobClient = /** @class */ (function (_super) { + tslib.__extends(BlockBlobClient, _super); + function BlockBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + var _this = this; // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. 
// super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); - let pipeline; - let url; + var pipeline; + var url; options = options || {}; if (credentialOrPipelineOrContainerName instanceof Pipeline) { // (url: string, pipeline: Pipeline) @@ -35120,12 +37483,12 @@ class BlockBlobClient extends BlobClient { blobNameOrOptions && typeof blobNameOrOptions === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + var containerName = credentialOrPipelineOrContainerName; + var blobName = blobNameOrOptions; + var extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); @@ -35145,9 +37508,10 @@ class BlockBlobClient extends BlobClient { else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url, pipeline); - this.blockBlobContext = new BlockBlob(this.storageClientContext); - this._blobContext = new Blob$1(this.storageClientContext); + _this = _super.call(this, url, pipeline) || this; + _this.blockBlobContext = new BlockBlob(_this.storageClientContext); + _this._blobContext = new Blob$1(_this.storageClientContext); + return _this; } /** * Creates a new BlockBlobClient object identical to the source but with the @@ -35157,9 +37521,9 @@ class BlockBlobClient extends BlobClient { * @param snapshot - The snapshot timestamp. * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. */ - withSnapshot(snapshot) { + BlockBlobClient.prototype.withSnapshot = function (snapshot) { return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); - } + }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -35190,35 +37554,47 @@ class BlockBlobClient extends BlobClient { * @param query - * @param options - */ - async query(query, options = {}) { + BlockBlobClient.prototype.query = function (query, options) { var _a; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); - try { - if (false) {} - const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { - queryType: "SQL", - expression: query, - inputSerialization: toQuerySerialization(options.inputTextConfiguration), - outputSerialization: toQuerySerialization(options.outputTextConfiguration) - }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - return new BlobQueryResponse(response, { - abortSignal: options.abortSignal, - onProgress: options.onProgress, - onError: options.onError - }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, response, e_23; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + _b = createSpan("BlockBlobClient-query", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this._blobContext.query(tslib.__assign({ abortSignal: options.abortSignal, queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration) + }, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _c.sent(); + return [2 /*return*/, new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError + })]; + case 3: + e_23 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_23.message + }); + throw e_23; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Creates a new block blob, or updates the content of an existing block blob. * Updating an existing block blob overwrites any existing metadata on the blob. @@ -35246,27 +37622,39 @@ class BlockBlobClient extends BlobClient { * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ - async upload(body, contentLength, options = {}) { + BlockBlobClient.prototype.upload = function (body, contentLength, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_24; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("BlockBlobClient-upload", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blockBlobContext.upload(contentLength, body, tslib.__assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_24 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_24.message + }); + throw e_24; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Creates a new Block Blob where the contents of the blob are read from a given URL. * This API is supported beginning with the 2020-04-08 version. Partial updates @@ -35285,31 +37673,43 @@ class BlockBlobClient extends BlobClient { * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= * @param options - Optional parameters. */ - async syncUploadFromURL(sourceURL, options = {}) { + BlockBlobClient.prototype.syncUploadFromURL = function (sourceURL, options) { var _a, _b, _c, _d, _e; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, - sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, - sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, - sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, - sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions - }, cpkInfo: options.customerProvidedKey, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _f, span, updatedOptions, e_25; + return tslib.__generator(this, function (_g) { + switch (_g.label) { + case 0: + options.conditions = options.conditions || {}; + _f = createSpan("BlockBlobClient-syncUploadFromURL", options), span = _f.span, updatedOptions = _f.updatedOptions; + _g.label = 1; + case 1: + _g.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blockBlobContext.putBlobFromUrl(0, sourceURL, tslib.__assign(tslib.__assign(tslib.__assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, + sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, + sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, + sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.tagConditions + }, cpkInfo: options.customerProvidedKey, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }), convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _g.sent()]; + case 3: + e_25 = _g.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_25.message + }); + throw e_25; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Uploads the specified block to the block blob's "staging area" to be later * committed by a call to commitBlockList. @@ -35321,25 +37721,37 @@ class BlockBlobClient extends BlobClient { * @param options - Options to the Block Blob Stage Block operation. * @returns Response data for the Block Blob Stage Block operation. 
*/ - async stageBlock(blockId, body, contentLength, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { - onUploadProgress: options.onProgress - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlockBlobClient.prototype.stageBlock = function (blockId, body, contentLength, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_26; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlockBlobClient-stageBlock", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blockBlobContext.stageBlock(blockId, contentLength, body, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { + onUploadProgress: options.onProgress + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_26 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_26.message + }); + throw e_26; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * The Stage Block From URL operation creates a new block to be committed as part * of a blob where the contents are read from a URL. @@ -35361,23 +37773,36 @@ class BlockBlobClient extends BlobClient { * @param options - Options to the Block Blob Stage Block From URL operation. * @returns Response data for the Block Blob Stage Block From URL operation. */ - async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? 
undefined : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlockBlobClient.prototype.stageBlockFromURL = function (blockId, sourceURL, offset, count, options) { + if (offset === void 0) { offset = 0; } + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_27; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlockBlobClient-stageBlockFromURL", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset: offset, count: count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_27 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_27.message + }); + throw e_27; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Writes a blob by specifying the list of block IDs that make up the blob. * In order to be written as part of a blob, a block must have been successfully written @@ -35390,25 +37815,37 @@ class BlockBlobClient extends BlobClient { * @param options - Options to the Block Blob Commit Block List operation. * @returns Response data for the Block Blob Commit Block List operation. */ - async commitBlockList(blocks, options = {}) { + BlockBlobClient.prototype.commitBlockList = function (blocks, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.commitBlockList({ latest: blocks }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_28; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("BlockBlobClient-commitBlockList", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blockBlobContext.commitBlockList({ latest: blocks }, tslib.__assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_28 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_28.message + }); + throw e_28; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns the list of blocks that have been uploaded as part of a block blob * using the specified block list filter. @@ -35419,30 +37856,43 @@ class BlockBlobClient extends BlobClient { * @param options - Options to the Block Blob Get Block List operation. * @returns Response data for the Block Blob Get Block List operation. */ - async getBlockList(listType, options = {}) { + BlockBlobClient.prototype.getBlockList = function (listType, options) { var _a; - const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); - try { - const res = await this.blockBlobContext.getBlockList(listType, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - if (!res.committedBlocks) { - res.committedBlocks = []; - } - if (!res.uncommittedBlocks) { - res.uncommittedBlocks = []; - } - return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, res, e_29; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlockBlobClient-getBlockList", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blockBlobContext.getBlockList(listType, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + res = _c.sent(); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return [2 /*return*/, res]; + case 3: + e_29 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_29.message + }); + throw e_29; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; // High level functions /** * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. @@ -35455,36 +37905,42 @@ class BlockBlobClient extends BlobClient { * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView * @param options - */ - async uploadData(data, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); - try { - if (true) { - let buffer; - if (data instanceof Buffer) { - buffer = data; + BlockBlobClient.prototype.uploadData = function (data, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, buffer_1, browserBlob_1; + return tslib.__generator(this, function (_b) { + _a = createSpan("BlockBlobClient-uploadData", options), span = _a.span, updatedOptions = _a.updatedOptions; + try { + if (true) { + if (data instanceof Buffer) { + buffer_1 = data; + } + else if (data instanceof ArrayBuffer) { + buffer_1 = Buffer.from(data); + } + else { + data = data; + buffer_1 = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return [2 /*return*/, this.uploadSeekableInternal(function (offset, size) { return buffer_1.slice(offset, offset + size); }, buffer_1.byteLength, updatedOptions)]; + } + else {} } - else if (data instanceof ArrayBuffer) { - buffer = Buffer.from(data); + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; } - else { - data = data; - buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + finally { + span.end(); } - return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); - } - else {} - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + return [2 /*return*/]; }); - throw e; - } - finally { - span.end(); - } - } + }); + 
}; /** * ONLY AVAILABLE IN BROWSERS. * @@ -35500,23 +37956,35 @@ class BlockBlobClient extends BlobClient { * @param options - Options to upload browser data. * @returns Response data for the Blob Upload operation. */ - async uploadBrowserData(browserData, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); - try { - const browserBlob = new Blob([browserData]); - return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlockBlobClient.prototype.uploadBrowserData = function (browserData, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, browserBlob_2, e_30; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlockBlobClient-uploadBrowserData", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + browserBlob_2 = new Blob([browserData]); + return [4 /*yield*/, this.uploadSeekableInternal(function (offset, size) { return browserBlob_2.slice(offset, offset + size); }, browserBlob_2.size, updatedOptions)]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_30 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_30.message + }); + throw e_30; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * * Uploads data to block blob. Requires a bodyFactory as the data source, @@ -35532,88 +38000,115 @@ class BlockBlobClient extends BlobClient { * @param options - Options to Upload to Block Blob operation. * @returns Response data for the Blob Upload operation. 
*/ - async uploadSeekableInternal(bodyFactory, size, options = {}) { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { - throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); - } - if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { - options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; - } - if (options.maxSingleShotSize < 0 || - options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { - throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); - } - if (options.blockSize === 0) { - if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`${size} is too larger to upload to a block blob.`); - } - if (size > options.maxSingleShotSize) { - options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); - if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } - } - } - if (!options.blobHTTPHeaders) { - options.blobHTTPHeaders = {}; - } - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); - try { - if (size <= options.maxSingleShotSize) { - return await this.upload(bodyFactory(0, size), size, updatedOptions); - } - const numBlocks = Math.floor((size - 1) / options.blockSize) + 1; - if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + - `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); - } - const blockList = []; - const blockIDPrefix = coreHttp.generateUuid(); - let transferProgress = 0; - const batch = new Batch(options.concurrency); - for (let i = 0; i < numBlocks; i++) { - batch.addOperation(async () => { - const blockID = generateBlockID(blockIDPrefix, i); - const start = options.blockSize * i; - const end = i === numBlocks - 1 ? 
size : start + options.blockSize; - const contentLength = end - start; - blockList.push(blockID); - await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { - abortSignal: options.abortSignal, - conditions: options.conditions, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - }); - // Update progress after block is successfully uploaded to server, in case of block trying - // TODO: Hook with convenience layer progress event in finer level - transferProgress += contentLength; - if (options.onProgress) { - options.onProgress({ - loadedBytes: transferProgress + BlockBlobClient.prototype.uploadSeekableInternal = function (bodyFactory, size, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, numBlocks_1, blockList_1, blockIDPrefix_1, transferProgress_2, batch, _loop_2, i, e_31; + var _this = this; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError("blockSize option must be >= 0 and <= " + BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES); + } + if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { + options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + } + if (options.maxSingleShotSize < 0 || + options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError("maxSingleShotSize option must be >= 0 and <= " + BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES); + } + if (options.blockSize === 0) { + if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(size + " is too larger to upload to a block blob."); + } + if (size > options.maxSingleShotSize) { + options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + } + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + _a = createSpan("BlockBlobClient-uploadSeekableInternal", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 5, 6, 7]); + if (!(size <= options.maxSingleShotSize)) return [3 /*break*/, 3]; + return [4 /*yield*/, this.upload(bodyFactory(0, size), size, updatedOptions)]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + numBlocks_1 = Math.floor((size - 1) / options.blockSize) + 1; + if (numBlocks_1 > BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError("The buffer's size is too big or the BlockSize is too small;" + + ("the number of blocks must be <= " + BLOCK_BLOB_MAX_BLOCKS)); + } + blockList_1 = []; + blockIDPrefix_1 = coreHttp.generateUuid(); + transferProgress_2 = 0; + batch = new Batch(options.concurrency); + _loop_2 = function (i) { + batch.addOperation(function () { return tslib.__awaiter(_this, void 0, void 0, function () { + var blockID, start, end, contentLength; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + blockID = generateBlockID(blockIDPrefix_1, i); + start = options.blockSize * i; + end = i === numBlocks_1 - 1 ? 
size : start + options.blockSize; + contentLength = end - start; + blockList_1.push(blockID); + return [4 /*yield*/, this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + })]; + case 1: + _a.sent(); + // Update progress after block is successfully uploaded to server, in case of block trying + // TODO: Hook with convenience layer progress event in finer level + transferProgress_2 += contentLength; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress_2 + }); + } + return [2 /*return*/]; + } + }); + }); }); + }; + for (i = 0; i < numBlocks_1; i++) { + _loop_2(i); + } + return [4 /*yield*/, batch.do()]; + case 4: + _b.sent(); + return [2 /*return*/, this.commitBlockList(blockList_1, updatedOptions)]; + case 5: + e_31 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_31.message }); - } - }); - } - await batch.do(); - return this.commitBlockList(blockList, updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + throw e_31; + case 6: + span.end(); + return [7 /*endfinally*/]; + case 7: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -35627,29 +38122,45 @@ class BlockBlobClient extends BlobClient { * @param options - Options to Upload to Block Blob operation. * @returns Response data for the Blob Upload operation. */ - async uploadFile(filePath, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); - try { - const size = (await fsStat(filePath)).size; - return await this.uploadSeekableInternal((offset, count) => { - return () => fsCreateReadStream(filePath, { - autoClose: true, - end: count ? offset + count - 1 : Infinity, - start: offset - }); - }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlockBlobClient.prototype.uploadFile = function (filePath, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, size, e_32; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlockBlobClient-uploadFile", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 4, 5, 6]); + return [4 /*yield*/, fsStat(filePath)]; + case 2: + size = (_b.sent()).size; + return [4 /*yield*/, this.uploadSeekableInternal(function (offset, count) { + return function () { + return fsCreateReadStream(filePath, { + autoClose: true, + end: count ? 
offset + count - 1 : Infinity, + start: offset + }); + }; + }, size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }))]; + case 3: return [2 /*return*/, _b.sent()]; + case 4: + e_32 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_32.message + }); + throw e_32; + case 5: + span.end(); + return [7 /*endfinally*/]; + case 6: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -35666,63 +38177,92 @@ class BlockBlobClient extends BlobClient { * @param options - Options to Upload Stream to Block Blob operation. * @returns Response data for the Blob Upload operation. */ - async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { - if (!options.blobHTTPHeaders) { - options.blobHTTPHeaders = {}; - } - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); - try { - let blockNum = 0; - const blockIDPrefix = coreHttp.generateUuid(); - let transferProgress = 0; - const blockList = []; - const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { - const blockID = generateBlockID(blockIDPrefix, blockNum); - blockList.push(blockID); - blockNum++; - await this.stageBlock(blockID, body, length, { - conditions: options.conditions, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - }); - // Update progress after block is successfully uploaded to server, in case of block trying - transferProgress += length; - if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); - } - }, - // concurrency should set a smaller value than maxConcurrency, which is helpful to - // reduce the possibility when a outgoing handler waits for stream data, in - // this situation, outgoing handlers are blocked. - // Outgoing queue shouldn't be empty. 
- Math.ceil((maxConcurrency / 4) * 3)); - await scheduler.do(); - return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlockBlobClient.prototype.uploadStream = function (stream, bufferSize, maxConcurrency, options) { + if (bufferSize === void 0) { bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES; } + if (maxConcurrency === void 0) { maxConcurrency = 5; } + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, blockNum_1, blockIDPrefix_2, transferProgress_3, blockList_2, scheduler, e_33; + var _this = this; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + _a = createSpan("BlockBlobClient-uploadStream", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 4, 5, 6]); + blockNum_1 = 0; + blockIDPrefix_2 = coreHttp.generateUuid(); + transferProgress_3 = 0; + blockList_2 = []; + scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, function (body, length) { return tslib.__awaiter(_this, void 0, void 0, function () { + var blockID; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + blockID = generateBlockID(blockIDPrefix_2, blockNum_1); + blockList_2.push(blockID); + blockNum_1++; + return [4 /*yield*/, this.stageBlock(blockID, body, length, { + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + })]; + case 1: + _a.sent(); + // Update progress after block is successfully uploaded to server, in case of block trying + transferProgress_3 += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress_3 }); + } + return [2 /*return*/]; + } + }); + }); }, + // concurrency should set a smaller value than maxConcurrency, which is helpful to + // reduce the possibility when a outgoing handler waits for stream data, in + // this situation, outgoing handlers are blocked. + // Outgoing queue shouldn't be empty. + Math.ceil((maxConcurrency / 4) * 3)); + return [4 /*yield*/, scheduler.do()]; + case 2: + _b.sent(); + return [4 /*yield*/, this.commitBlockList(blockList_2, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }))]; + case 3: return [2 /*return*/, _b.sent()]; + case 4: + e_33 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_33.message + }); + throw e_33; + case 5: + span.end(); + return [7 /*endfinally*/]; + case 6: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } -} + }); + }; + return BlockBlobClient; +}(BlobClient)); /** * PageBlobClient defines a set of operations applicable to page blobs. 
*/ -class PageBlobClient extends BlobClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { +var PageBlobClient = /** @class */ (function (_super) { + tslib.__extends(PageBlobClient, _super); + function PageBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + var _this = this; // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); - let pipeline; - let url; + var pipeline; + var url; options = options || {}; if (credentialOrPipelineOrContainerName instanceof Pipeline) { // (url: string, pipeline: Pipeline) @@ -35749,12 +38289,12 @@ class PageBlobClient extends BlobClient { blobNameOrOptions && typeof blobNameOrOptions === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + var containerName = credentialOrPipelineOrContainerName; + var blobName = blobNameOrOptions; + var extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); @@ -35774,8 +38314,9 @@ class PageBlobClient extends BlobClient { else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url, pipeline); - this.pageBlobContext = new PageBlob(this.storageClientContext); + _this = _super.call(this, url, pipeline) || this; + _this.pageBlobContext = new PageBlob(_this.storageClientContext); + return _this; } /** * Creates a new PageBlobClient object identical to the source but with the @@ -35785,9 +38326,9 @@ class PageBlobClient extends BlobClient { * @param snapshot - The snapshot timestamp. * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. */ - withSnapshot(snapshot) { + PageBlobClient.prototype.withSnapshot = function (snapshot) { return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); - } + }; /** * Creates a page blob of the specified length. Call uploadPages to upload data * data to a page blob. @@ -35797,25 +38338,37 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Create operation. * @returns Response data for the Page Blob Create operation. 
*/ - async create(size, options = {}) { + PageBlobClient.prototype.create = function (size, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-create", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_34; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-create", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.pageBlobContext.create(0, size, tslib.__assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_34 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_34.message + }); + throw e_34; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Creates a page blob of the specified length. Call uploadPages to upload data * data to a page blob. If the blob with the same name already exists, the content @@ -35825,33 +38378,46 @@ class PageBlobClient extends BlobClient { * @param size - size of the page blob. * @param options - */ - async createIfNotExists(size, options = {}) { + PageBlobClient.prototype.createIfNotExists = function (size, options) { var _a, _b; - const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); - try { - const conditions = { ifNoneMatch: ETagAny }; - const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _c, span, updatedOptions, conditions, res, e_35; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _c = createSpan("PageBlobClient-createIfNotExists", options), span = _c.span, updatedOptions = _c.updatedOptions; + _d.label = 1; + case 1: + _d.trys.push([1, 3, 4, 5]); + conditions = { ifNoneMatch: ETagAny }; + return [4 /*yield*/, this.create(size, tslib.__assign(tslib.__assign({}, options), { conditions: conditions, tracingOptions: updatedOptions.tracingOptions }))]; + case 2: + res = _d.sent(); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable + })]; + case 3: + e_35 = _d.sent(); + if (((_a = e_35.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist." + }); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_35.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_35.response })]; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_35.message + }); + throw e_35; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. * @see https://docs.microsoft.com/rest/api/storageservices/put-page @@ -35862,27 +38428,39 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Upload Pages operation. * @returns Response data for the Page Blob Upload Pages operation. */ - async uploadPages(body, offset, count, options = {}) { + PageBlobClient.prototype.uploadPages = function (body, offset, count, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress - }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_36; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-uploadPages", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.pageBlobContext.uploadPages(count, body, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress + }, range: rangeToString({ offset: offset, count: count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_36 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_36.message + }); + throw e_36; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * The Upload Pages operation writes a range of pages to a page blob where the * contents are read from a URL. @@ -35894,31 +38472,43 @@ class PageBlobClient extends BlobClient { * @param count - Number of bytes to be uploaded from source page blob * @param options - */ - async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + PageBlobClient.prototype.uploadPagesFromURL = function (sourceURL, sourceOffset, destOffset, count, options) { var _a; - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_37; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + _b = createSpan("PageBlobClient-uploadPagesFromURL", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count: count }), 0, rangeToString({ offset: destOffset, count: count }), tslib.__assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_37 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_37.message + }); + throw e_37; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Frees the specified pages from the page blob. * @see https://docs.microsoft.com/rest/api/storageservices/put-page @@ -35928,24 +38518,37 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Clear Pages operation. * @returns Response data for the Page Blob Clear Pages operation. */ - async clearPages(offset = 0, count, options = {}) { + PageBlobClient.prototype.clearPages = function (offset, count, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); - try { - return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (offset === void 0) { offset = 0; } + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_38; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-clearPages", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.pageBlobContext.clearPages(0, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset: offset, count: count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_38 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_38.message + }); + throw e_38; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns the list of valid page ranges for a page blob or snapshot of a page blob. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges @@ -35955,26 +38558,39 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Get Ranges operation. * @returns Response data for the Page Blob Get Ranges operation. */ - async getPageRanges(offset = 0, count, options = {}) { + PageBlobClient.prototype.getPageRanges = function (offset, count, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); - try { - return await this.pageBlobContext - .getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (offset === void 0) { offset = 0; } + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_39; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-getPageRanges", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.pageBlobContext + .getPageRanges(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset: offset, count: count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel)]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_39 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_39.message + }); + throw e_39; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Gets the collection of page ranges that differ between a specified snapshot and this page blob. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges @@ -35985,26 +38601,38 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Get Page Ranges Diff operation. * @returns Response data for the Page Blob Get Page Range Diff operation. */ - async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + PageBlobClient.prototype.getPageRangesDiff = function (offset, count, prevSnapshot, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); - try { - return await this.pageBlobContext - .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_40; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-getPageRangesDiff", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.pageBlobContext + .getPageRangesDiff(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset: offset, count: count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel)]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_40 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_40.message + }); + throw e_40; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges @@ -36015,26 +38643,38 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Get Page Ranges Diff operation. * @returns Response data for the Page Blob Get Page Range Diff operation. */ - async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { + PageBlobClient.prototype.getPageRangesDiffForManagedDisks = function (offset, count, prevSnapshotUrl, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); - try { - return await this.pageBlobContext - .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), prevSnapshotUrl, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_41; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.pageBlobContext + .getPageRangesDiff(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl: prevSnapshotUrl, range: rangeToString({ offset: offset, count: count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel)]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_41 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_41.message + }); + throw e_41; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Resizes the page blob to the specified size (which must be a multiple of 512). * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties @@ -36043,24 +38683,36 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Resize operation. * @returns Response data for the Page Blob Resize operation. */ - async resize(size, options = {}) { + PageBlobClient.prototype.resize = function (size, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); - try { - return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_42; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-resize", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.pageBlobContext.resize(size, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_42 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_42.message + }); + throw e_42; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets a page blob's sequence number. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties @@ -36070,24 +38722,36 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Update Sequence Number operation. * @returns Response data for the Page Blob Update Sequence Number operation. */ - async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { + PageBlobClient.prototype.updateSequenceNumber = function (sequenceNumberAction, sequenceNumber, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); - try { - return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_43; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-updateSequenceNumber", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, tslib.__assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_43 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_43.message + }); + throw e_43; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. * The snapshot is copied such that only the differential changes between the previously @@ -36101,46 +38765,69 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Copy Incremental operation. * @returns Response data for the Page Blob Copy Incremental operation. 
*/ - async startCopyIncremental(copySource, options = {}) { + PageBlobClient.prototype.startCopyIncremental = function (copySource, options) { var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); - try { - return await this.pageBlobContext.copyIncremental(copySource, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_44; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("PageBlobClient-startCopyIncremental", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.pageBlobContext.copyIncremental(copySource, tslib.__assign({ abortSignal: options.abortSignal, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_44 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_44.message + }); + throw e_44; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } -} + }); + }; + return PageBlobClient; +}(BlobClient)); // Copyright (c) Microsoft Corporation. -async function getBodyAsText(batchResponse) { - let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); - const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); - // Slice the buffer to trim the empty ending. - buffer = buffer.slice(0, responseLength); - return buffer.toString(); +function getBodyAsText(batchResponse) { + return tslib.__awaiter(this, void 0, void 0, function () { + var buffer, responseLength; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); + return [4 /*yield*/, streamToBuffer2(batchResponse.readableStreamBody, buffer)]; + case 1: + responseLength = _a.sent(); + // Slice the buffer to trim the empty ending. + buffer = buffer.slice(0, responseLength); + return [2 /*return*/, buffer.toString()]; + } + }); + }); } function utf8ByteLength(str) { return Buffer.byteLength(str); } // Copyright (c) Microsoft Corporation. -const HTTP_HEADER_DELIMITER = ": "; -const SPACE_DELIMITER = " "; -const NOT_FOUND = -1; +var HTTP_HEADER_DELIMITER = ": "; +var SPACE_DELIMITER = " "; +var NOT_FOUND = -1; /** * Util class for parsing batch response. */ -class BatchResponseParser { - constructor(batchResponse, subRequests) { +var BatchResponseParser = /** @class */ (function () { + function BatchResponseParser(batchResponse, subRequests) { if (!batchResponse || !batchResponse.contentType) { // In special case(reported), server may return invalid content-type which could not be parsed. 
throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); @@ -36152,119 +38839,129 @@ class BatchResponseParser { this.batchResponse = batchResponse; this.subRequests = subRequests; this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; - this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; - this.batchResponseEnding = `--${this.responseBatchBoundary}--`; + this.perResponsePrefix = "--" + this.responseBatchBoundary + HTTP_LINE_ENDING; + this.batchResponseEnding = "--" + this.responseBatchBoundary + "--"; } // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response - async parseBatchResponse() { - // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse - // sub request's response. - if (this.batchResponse._response.status != HTTPURLConnection.HTTP_ACCEPTED) { - throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); - } - const responseBodyAsText = await getBodyAsText(this.batchResponse); - const subResponses = responseBodyAsText - .split(this.batchResponseEnding)[0] // string after ending is useless - .split(this.perResponsePrefix) - .slice(1); // string before first response boundary is useless - const subResponseCount = subResponses.length; - // Defensive coding in case of potential error parsing. - // Note: subResponseCount == 1 is special case where sub request is invalid. - // We try to prevent such cases through early validation, e.g. validate sub request count >= 1. - // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. - if (subResponseCount != this.subRequests.size && subResponseCount != 1) { - throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); - } - const deserializedSubResponses = new Array(subResponseCount); - let subResponsesSucceededCount = 0; - let subResponsesFailedCount = 0; - // Parse sub subResponses. - for (let index = 0; index < subResponseCount; index++) { - const subResponse = subResponses[index]; - const deserializedSubResponse = {}; - deserializedSubResponse.headers = new coreHttp.HttpHeaders(); - const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); - let subRespHeaderStartFound = false; - let subRespHeaderEndFound = false; - let subRespFailed = false; - let contentId = NOT_FOUND; - for (const responseLine of responseLines) { - if (!subRespHeaderStartFound) { - // Convention line to indicate content ID - if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { - contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); - } - // Http version line with status code indicates the start of sub request's response. - // Example: HTTP/1.1 202 Accepted - if (responseLine.startsWith(HTTP_VERSION_1_1)) { - subRespHeaderStartFound = true; - const tokens = responseLine.split(SPACE_DELIMITER); - deserializedSubResponse.status = parseInt(tokens[1]); - deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); - } - continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * - } - if (responseLine.trim() === "") { - // Sub response's header start line already found, and the first empty line indicates header end line found. 
- if (!subRespHeaderEndFound) { - subRespHeaderEndFound = true; - } - continue; // Skip empty line - } - // Note: when code reach here, it indicates subRespHeaderStartFound == true - if (!subRespHeaderEndFound) { - if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { - // Defensive coding to prevent from missing valuable lines. - throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); - } - // Parse headers of sub response. - const tokens = responseLine.split(HTTP_HEADER_DELIMITER); - deserializedSubResponse.headers.set(tokens[0], tokens[1]); - if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { - deserializedSubResponse.errorCode = tokens[1]; - subRespFailed = true; - } + BatchResponseParser.prototype.parseBatchResponse = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + var responseBodyAsText, subResponses, subResponseCount, deserializedSubResponses, subResponsesSucceededCount, subResponsesFailedCount, index, subResponse, deserializedSubResponse, responseLines, subRespHeaderStartFound, subRespHeaderEndFound, subRespFailed, contentId, _i, responseLines_1, responseLine, tokens, tokens; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse + // sub request's response. + if (this.batchResponse._response.status != HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error("Invalid state: batch request failed with status: '" + this.batchResponse._response.status + "'."); + } + return [4 /*yield*/, getBodyAsText(this.batchResponse)]; + case 1: + responseBodyAsText = _a.sent(); + subResponses = responseBodyAsText + .split(this.batchResponseEnding)[0] // string after ending is useless + .split(this.perResponsePrefix) + .slice(1); + subResponseCount = subResponses.length; + // Defensive coding in case of potential error parsing. + // Note: subResponseCount == 1 is special case where sub request is invalid. + // We try to prevent such cases through early validation, e.g. validate sub request count >= 1. + // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. + if (subResponseCount != this.subRequests.size && subResponseCount != 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + } + deserializedSubResponses = new Array(subResponseCount); + subResponsesSucceededCount = 0; + subResponsesFailedCount = 0; + // Parse sub subResponses. + for (index = 0; index < subResponseCount; index++) { + subResponse = subResponses[index]; + deserializedSubResponse = {}; + deserializedSubResponse.headers = new coreHttp.HttpHeaders(); + responseLines = subResponse.split("" + HTTP_LINE_ENDING); + subRespHeaderStartFound = false; + subRespHeaderEndFound = false; + subRespFailed = false; + contentId = NOT_FOUND; + for (_i = 0, responseLines_1 = responseLines; _i < responseLines_1.length; _i++) { + responseLine = responseLines_1[_i]; + if (!subRespHeaderStartFound) { + // Convention line to indicate content ID + if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + // Http version line with status code indicates the start of sub request's response. 
+ // Example: HTTP/1.1 202 Accepted + if (responseLine.startsWith(HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * + } + if (responseLine.trim() === "") { + // Sub response's header start line already found, and the first empty line indicates header end line found. + if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; // Skip empty line + } + // Note: when code reach here, it indicates subRespHeaderStartFound == true + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + // Defensive coding to prevent from missing valuable lines. + throw new Error("Invalid state: find non-empty line '" + responseLine + "' without HTTP header delimiter '" + HTTP_HEADER_DELIMITER + "'."); + } + tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; + } + } + else { + // Assemble body of sub response. + if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; + } + } // Inner for end + // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. + // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it + // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that + // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose. + if (contentId != NOT_FOUND && + Number.isInteger(contentId) && + contentId >= 0 && + contentId < this.subRequests.size && + deserializedSubResponses[contentId] === undefined) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; + } + else { + logger.error("subResponses[" + index + "] is dropped as the Content-ID is not found or invalid, Content-ID: " + contentId); + } + if (subRespFailed) { + subResponsesFailedCount++; + } + else { + subResponsesSucceededCount++; + } + } + return [2 /*return*/, { + subResponses: deserializedSubResponses, + subResponsesSucceededCount: subResponsesSucceededCount, + subResponsesFailedCount: subResponsesFailedCount + }]; } - else { - // Assemble body of sub response. - if (!deserializedSubResponse.bodyAsText) { - deserializedSubResponse.bodyAsText = ""; - } - deserializedSubResponse.bodyAsText += responseLine; - } - } // Inner for end - // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. - // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it - // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that - // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose. 
- if (contentId != NOT_FOUND && - Number.isInteger(contentId) && - contentId >= 0 && - contentId < this.subRequests.size && - deserializedSubResponses[contentId] === undefined) { - deserializedSubResponse._request = this.subRequests.get(contentId); - deserializedSubResponses[contentId] = deserializedSubResponse; - } - else { - logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); - } - if (subRespFailed) { - subResponsesFailedCount++; - } - else { - subResponsesSucceededCount++; - } - } - return { - subResponses: deserializedSubResponses, - subResponsesSucceededCount: subResponsesSucceededCount, - subResponsesFailedCount: subResponsesFailedCount - }; - } -} + }); + }); + }; + return BatchResponseParser; +}()); // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. var MutexLockStatus; (function (MutexLockStatus) { MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED"; @@ -36273,68 +38970,82 @@ var MutexLockStatus; /** * An async mutex lock. */ -class Mutex { +var Mutex = /** @class */ (function () { + function Mutex() { + } /** * Lock for a specific key. If the lock has been acquired by another customer, then * will wait until getting the lock. * * @param key - lock key */ - static async lock(key) { - return new Promise((resolve) => { - if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) { - this.keys[key] = MutexLockStatus.LOCKED; - resolve(); - } - else { - this.onUnlockEvent(key, () => { - this.keys[key] = MutexLockStatus.LOCKED; - resolve(); - }); - } + Mutex.lock = function (key) { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, new Promise(function (resolve) { + if (_this.keys[key] === undefined || _this.keys[key] === MutexLockStatus.UNLOCKED) { + _this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + } + else { + _this.onUnlockEvent(key, function () { + _this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + }); + } + })]; + }); }); - } + }; /** * Unlock a key. * * @param key - */ - static async unlock(key) { - return new Promise((resolve) => { - if (this.keys[key] === MutexLockStatus.LOCKED) { - this.emitUnlockEvent(key); - } - delete this.keys[key]; - resolve(); + Mutex.unlock = function (key) { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, new Promise(function (resolve) { + if (_this.keys[key] === MutexLockStatus.LOCKED) { + _this.emitUnlockEvent(key); + } + delete _this.keys[key]; + resolve(); + })]; + }); }); - } - static onUnlockEvent(key, handler) { + }; + Mutex.onUnlockEvent = function (key, handler) { if (this.listeners[key] === undefined) { this.listeners[key] = [handler]; } else { this.listeners[key].push(handler); } - } - static emitUnlockEvent(key) { + }; + Mutex.emitUnlockEvent = function (key) { + var _this = this; if (this.listeners[key] !== undefined && this.listeners[key].length > 0) { - const handler = this.listeners[key].shift(); - setImmediate(() => { - handler.call(this); + var handler_1 = this.listeners[key].shift(); + setImmediate(function () { + handler_1.call(_this); }); } - } -} -Mutex.keys = {}; -Mutex.listeners = {}; + }; + Mutex.keys = {}; + Mutex.listeners = {}; + return Mutex; +}()); // Copyright (c) Microsoft Corporation. /** * A BlobBatch represents an aggregated set of operations on blobs. 
* Currently, only `delete` and `setAccessTier` are supported. */ -class BlobBatch { - constructor() { +var BlobBatch = /** @class */ (function () { + function BlobBatch() { this.batch = "batch"; this.batchRequest = new InnerBatchRequest(); } @@ -36343,151 +39054,203 @@ class BlobBatch { * The value must be multipart/mixed with a batch boundary. * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 */ - getMultiPartContentType() { + BlobBatch.prototype.getMultiPartContentType = function () { return this.batchRequest.getMultipartContentType(); - } + }; /** * Get assembled HTTP request body for sub requests. */ - getHttpRequestBody() { + BlobBatch.prototype.getHttpRequestBody = function () { return this.batchRequest.getHttpRequestBody(); - } + }; /** * Get sub requests that are added into the batch request. */ - getSubRequests() { + BlobBatch.prototype.getSubRequests = function () { return this.batchRequest.getSubRequests(); - } - async addSubRequestInternal(subRequest, assembleSubRequestFunc) { - await Mutex.lock(this.batch); - try { - this.batchRequest.preAddSubRequest(subRequest); - await assembleSubRequestFunc(); - this.batchRequest.postAddSubRequest(subRequest); - } - finally { - await Mutex.unlock(this.batch); - } - } - setBatchType(batchType) { + }; + BlobBatch.prototype.addSubRequestInternal = function (subRequest, assembleSubRequestFunc) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, Mutex.lock(this.batch)]; + case 1: + _a.sent(); + _a.label = 2; + case 2: + _a.trys.push([2, , 4, 6]); + this.batchRequest.preAddSubRequest(subRequest); + return [4 /*yield*/, assembleSubRequestFunc()]; + case 3: + _a.sent(); + this.batchRequest.postAddSubRequest(subRequest); + return [3 /*break*/, 6]; + case 4: return [4 /*yield*/, Mutex.unlock(this.batch)]; + case 5: + _a.sent(); + return [7 /*endfinally*/]; + case 6: return [2 /*return*/]; + } + }); + }); + }; + BlobBatch.prototype.setBatchType = function (batchType) { if (!this.batchType) { this.batchType = batchType; } if (this.batchType !== batchType) { - throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); - } - } - async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { - let url; - let credential; - if (typeof urlOrBlobClient === "string" && - ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || - credentialOrOptions instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrOptions))) { - // First overload - url = urlOrBlobClient; - credential = credentialOrOptions; - } - else if (urlOrBlobClient instanceof BlobClient) { - // Second overload - url = urlOrBlobClient.url; - credential = urlOrBlobClient.credential; - options = credentialOrOptions; - } - else { - throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); - } - if (!options) { - options = {}; - } - const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); - try { - this.setBatchType("delete"); - await this.addSubRequestInternal({ - url: url, - credential: credential - }, async () => { - await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); - }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } - finally { - span.end(); - } - } - async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { - let url; - let credential; - let tier; - if (typeof urlOrBlobClient === "string" && - ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || - credentialOrTier instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrTier))) { - // First overload - url = urlOrBlobClient; - credential = credentialOrTier; - tier = tierOrOptions; - } - else if (urlOrBlobClient instanceof BlobClient) { - // Second overload - url = urlOrBlobClient.url; - credential = urlOrBlobClient.credential; - tier = credentialOrTier; - options = tierOrOptions; - } - else { - throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); - } - if (!options) { - options = {}; + throw new RangeError("BlobBatch only supports one operation type per batch and it already is being used for " + this.batchType + " operations."); } - const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); - try { - this.setBatchType("setAccessTier"); - await this.addSubRequestInternal({ - url: url, - credential: credential - }, async () => { - await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); + }; + BlobBatch.prototype.deleteBlob = function (urlOrBlobClient, credentialOrOptions, options) { + return tslib.__awaiter(this, void 0, void 0, function () { + var url, credential, _a, span, updatedOptions, e_1; + var _this = this; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (typeof urlOrBlobClient === "string" && + ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || + credentialOrOptions instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrOptions))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } + else { + throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + _a = createSpan("BatchDeleteRequest-addSubRequest", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + this.setBatchType("delete"); + return [4 /*yield*/, this.addSubRequestInternal({ + url: url, + credential: credential + }, function () { return tslib.__awaiter(_this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions)]; + case 1: + _a.sent(); + return [2 /*return*/]; + } + }); + }); })]; + case 2: + _b.sent(); + return [3 /*break*/, 5]; + case 3: + e_1 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_1.message + }); + throw e_1; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + }); + }; + BlobBatch.prototype.setBlobAccessTier = function (urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + return tslib.__awaiter(this, void 0, void 0, function () { + var url, credential, tier, _a, span, updatedOptions, e_2; + var _this = this; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (typeof urlOrBlobClient === "string" && + ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || + credentialOrTier instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrTier))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrTier; + tier = tierOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier = credentialOrTier; + options = tierOrOptions; + } + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + _a = createSpan("BatchSetTierRequest-addSubRequest", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + this.setBatchType("setAccessTier"); + return [4 /*yield*/, this.addSubRequestInternal({ + url: url, + credential: credential + }, function () { return tslib.__awaiter(_this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions)]; + case 1: + _a.sent(); + return [2 /*return*/]; + } + }); + }); })]; + case 2: + _b.sent(); + return [3 /*break*/, 5]; + case 3: + e_2 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_2.message + }); + throw e_2; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } -} + }); + }; + return BlobBatch; +}()); /** * Inner batch request class which is responsible for assembling and serializing sub requests. * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. 
*/ -class InnerBatchRequest { - constructor() { +var InnerBatchRequest = /** @class */ (function () { + function InnerBatchRequest() { this.operationCount = 0; this.body = ""; - const tempGuid = coreHttp.generateUuid(); + var tempGuid = coreHttp.generateUuid(); // batch_{batchid} - this.boundary = `batch_${tempGuid}`; + this.boundary = "batch_" + tempGuid; // --batch_{batchid} // Content-Type: application/http // Content-Transfer-Encoding: binary - this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + this.subRequestPrefix = "--" + this.boundary + HTTP_LINE_ENDING + HeaderConstants.CONTENT_TYPE + ": application/http" + HTTP_LINE_ENDING + HeaderConstants.CONTENT_TRANSFER_ENCODING + ": binary"; // multipart/mixed; boundary=batch_{batchid} - this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + this.multipartContentType = "multipart/mixed; boundary=" + this.boundary; // --batch_{batchid}-- - this.batchRequestEnding = `--${this.boundary}--`; + this.batchRequestEnding = "--" + this.boundary + "--"; this.subRequests = new Map(); } /** @@ -36497,10 +39260,10 @@ class InnerBatchRequest { * and intercept request from going to wire. * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. */ - createPipeline(credential) { - const isAnonymousCreds = credential instanceof AnonymousCredential; - const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] - const factories = new Array(policyFactoryLength); + InnerBatchRequest.prototype.createPipeline = function (credential) { + var isAnonymousCreds = credential instanceof AnonymousCredential; + var policyFactoryLength = 3 + (isAnonymousCreds ? 
0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] + var factories = new Array(policyFactoryLength); factories[0] = coreHttp.deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers if (!isAnonymousCreds) { @@ -36510,93 +39273,117 @@ class InnerBatchRequest { } factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire return new Pipeline(factories, {}); - } - appendSubRequestToBody(request) { + }; + InnerBatchRequest.prototype.appendSubRequestToBody = function (request) { // Start to assemble sub request this.body += [ this.subRequestPrefix, - `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + HeaderConstants.CONTENT_ID + ": " + this.operationCount, "", - `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}` // sub request start line with method + request.method.toString() + " " + getURLPathAndQuery(request.url) + " " + HTTP_VERSION_1_1 + HTTP_LINE_ENDING // sub request start line with method ].join(HTTP_LINE_ENDING); - for (const header of request.headers.headersArray()) { - this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + for (var _i = 0, _a = request.headers.headersArray(); _i < _a.length; _i++) { + var header = _a[_i]; + this.body += header.name + ": " + header.value + HTTP_LINE_ENDING; } this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line // No body to assemble for current batch request support // End to assemble sub request - } - preAddSubRequest(subRequest) { + }; + InnerBatchRequest.prototype.preAddSubRequest = function (subRequest) { if (this.operationCount >= BATCH_MAX_REQUEST) { - throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + throw new RangeError("Cannot exceed " + BATCH_MAX_REQUEST + " sub requests in a single batch"); } // Fast fail if url for sub request is invalid - const path = getURLPath(subRequest.url); + var path = getURLPath(subRequest.url); if (!path || path == "") { - throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + throw new RangeError("Invalid url for sub request: '" + subRequest.url + "'"); } - } - postAddSubRequest(subRequest) { + }; + InnerBatchRequest.prototype.postAddSubRequest = function (subRequest) { this.subRequests.set(this.operationCount, subRequest); this.operationCount++; - } + }; // Return the http request body with assembling the ending line to the sub request body. 
- getHttpRequestBody() { - return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; - } - getMultipartContentType() { + InnerBatchRequest.prototype.getHttpRequestBody = function () { + return "" + this.body + this.batchRequestEnding + HTTP_LINE_ENDING; + }; + InnerBatchRequest.prototype.getMultipartContentType = function () { return this.multipartContentType; - } - getSubRequests() { + }; + InnerBatchRequest.prototype.getSubRequests = function () { return this.subRequests; - } -} -class BatchRequestAssemblePolicy extends coreHttp.BaseRequestPolicy { - constructor(batchRequest, nextPolicy, options) { - super(nextPolicy, options); - this.dummyResponse = { + }; + return InnerBatchRequest; +}()); +var BatchRequestAssemblePolicy = /** @class */ (function (_super) { + tslib.__extends(BatchRequestAssemblePolicy, _super); + function BatchRequestAssemblePolicy(batchRequest, nextPolicy, options) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.dummyResponse = { request: new coreHttp.WebResource(), status: 200, headers: new coreHttp.HttpHeaders() }; - this.batchRequest = batchRequest; - } - async sendRequest(request) { - await this.batchRequest.appendSubRequestToBody(request); - return this.dummyResponse; // Intercept request from going to wire + _this.batchRequest = batchRequest; + return _this; } -} -class BatchRequestAssemblePolicyFactory { - constructor(batchRequest) { + BatchRequestAssemblePolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.batchRequest.appendSubRequestToBody(request)]; + case 1: + _a.sent(); + return [2 /*return*/, this.dummyResponse]; // Intercept request from going to wire + } + }); + }); + }; + return BatchRequestAssemblePolicy; +}(coreHttp.BaseRequestPolicy)); +var BatchRequestAssemblePolicyFactory = /** @class */ (function () { + function BatchRequestAssemblePolicyFactory(batchRequest) { this.batchRequest = batchRequest; } - create(nextPolicy, options) { + BatchRequestAssemblePolicyFactory.prototype.create = function (nextPolicy, options) { return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); + }; + return BatchRequestAssemblePolicyFactory; +}()); +var BatchHeaderFilterPolicy = /** @class */ (function (_super) { + tslib.__extends(BatchHeaderFilterPolicy, _super); + function BatchHeaderFilterPolicy(nextPolicy, options) { + return _super.call(this, nextPolicy, options) || this; + } + BatchHeaderFilterPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + var xMsHeaderName, _i, _a, header; + return tslib.__generator(this, function (_b) { + xMsHeaderName = ""; + for (_i = 0, _a = request.headers.headersArray(); _i < _a.length; _i++) { + header = _a[_i]; + if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = header.name; + } + } + if (xMsHeaderName !== "") { + request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. 
+ } + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + }); + }); + }; + return BatchHeaderFilterPolicy; +}(coreHttp.BaseRequestPolicy)); +var BatchHeaderFilterPolicyFactory = /** @class */ (function () { + function BatchHeaderFilterPolicyFactory() { } -} -class BatchHeaderFilterPolicy extends coreHttp.BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(request) { - let xMsHeaderName = ""; - for (const header of request.headers.headersArray()) { - if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { - xMsHeaderName = header.name; - } - } - if (xMsHeaderName !== "") { - request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. - } - return this._nextPolicy.sendRequest(request); - } -} -class BatchHeaderFilterPolicyFactory { - constructor() { } - create(nextPolicy, options) { + BatchHeaderFilterPolicyFactory.prototype.create = function (nextPolicy, options) { return new BatchHeaderFilterPolicy(nextPolicy, options); - } -} + }; + return BatchHeaderFilterPolicyFactory; +}()); // Copyright (c) Microsoft Corporation. /** @@ -36604,9 +39391,9 @@ class BatchHeaderFilterPolicyFactory { * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch */ -class BlobBatchClient { - constructor(url, credentialOrPipeline, options) { - let pipeline; +var BlobBatchClient = /** @class */ (function () { + function BlobBatchClient(url, credentialOrPipeline, options) { + var pipeline; if (credentialOrPipeline instanceof Pipeline) { pipeline = credentialOrPipeline; } @@ -36617,8 +39404,8 @@ class BlobBatchClient { else { pipeline = newPipeline(credentialOrPipeline, options); } - const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); - const path = getURLPath(url); + var storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); + var path = getURLPath(url); if (path && path !== "/") { // Container scoped. this.serviceOrContainerContext = new Container(storageClientContext); @@ -36631,33 +39418,67 @@ class BlobBatchClient { * Creates a {@link BlobBatch}. * A BlobBatch represents an aggregated set of operations on blobs. 
*/ - createBatch() { + BlobBatchClient.prototype.createBatch = function () { return new BlobBatch(); - } - async deleteBlobs(urlsOrBlobClients, credentialOrOptions, options) { - const batch = new BlobBatch(); - for (const urlOrBlobClient of urlsOrBlobClients) { - if (typeof urlOrBlobClient === "string") { - await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); - } - else { - await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); - } - } - return this.submitBatch(batch); - } - async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { - const batch = new BlobBatch(); - for (const urlOrBlobClient of urlsOrBlobClients) { - if (typeof urlOrBlobClient === "string") { - await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); - } - else { - await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); - } - } - return this.submitBatch(batch); - } + }; + BlobBatchClient.prototype.deleteBlobs = function (urlsOrBlobClients, credentialOrOptions, options) { + return tslib.__awaiter(this, void 0, void 0, function () { + var batch, _i, urlsOrBlobClients_1, urlOrBlobClient; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + batch = new BlobBatch(); + _i = 0, urlsOrBlobClients_1 = urlsOrBlobClients; + _a.label = 1; + case 1: + if (!(_i < urlsOrBlobClients_1.length)) return [3 /*break*/, 6]; + urlOrBlobClient = urlsOrBlobClients_1[_i]; + if (!(typeof urlOrBlobClient === "string")) return [3 /*break*/, 3]; + return [4 /*yield*/, batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options)]; + case 2: + _a.sent(); + return [3 /*break*/, 5]; + case 3: return [4 /*yield*/, batch.deleteBlob(urlOrBlobClient, credentialOrOptions)]; + case 4: + _a.sent(); + _a.label = 5; + case 5: + _i++; + return [3 /*break*/, 1]; + case 6: return [2 /*return*/, this.submitBatch(batch)]; + } + }); + }); + }; + BlobBatchClient.prototype.setBlobsAccessTier = function (urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { + return tslib.__awaiter(this, void 0, void 0, function () { + var batch, _i, urlsOrBlobClients_2, urlOrBlobClient; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + batch = new BlobBatch(); + _i = 0, urlsOrBlobClients_2 = urlsOrBlobClients; + _a.label = 1; + case 1: + if (!(_i < urlsOrBlobClients_2.length)) return [3 /*break*/, 6]; + urlOrBlobClient = urlsOrBlobClients_2[_i]; + if (!(typeof urlOrBlobClient === "string")) return [3 /*break*/, 3]; + return [4 /*yield*/, batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options)]; + case 2: + _a.sent(); + return [3 /*break*/, 5]; + case 3: return [4 /*yield*/, batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions)]; + case 4: + _a.sent(); + _a.label = 5; + case 5: + _i++; + return [3 /*break*/, 1]; + case 6: return [2 /*return*/, this.submitBatch(batch)]; + } + }); + }); + }; /** * Submit batch request which consists of multiple subrequests. * @@ -36693,51 +39514,67 @@ class BlobBatchClient { * @param batchRequest - A set of Delete or SetTier operations. 
* @param options - */ - async submitBatch(batchRequest, options = {}) { - if (!batchRequest || batchRequest.getSubRequests().size == 0) { - throw new RangeError("Batch request should contain one or more sub requests."); - } - const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); - try { - const batchRequestBody = batchRequest.getHttpRequestBody(); - // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. - const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); - // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). - const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); - const responseSummary = await batchResponseParser.parseBatchResponse(); - const res = { - _response: rawBatchResponse._response, - contentType: rawBatchResponse.contentType, - errorCode: rawBatchResponse.errorCode, - requestId: rawBatchResponse.requestId, - clientRequestId: rawBatchResponse.clientRequestId, - version: rawBatchResponse.version, - subResponses: responseSummary.subResponses, - subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, - subResponsesFailedCount: responseSummary.subResponsesFailedCount - }; - return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobBatchClient.prototype.submitBatch = function (batchRequest, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, batchRequestBody, rawBatchResponse, batchResponseParser, responseSummary, res, e_1; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!batchRequest || batchRequest.getSubRequests().size == 0) { + throw new RangeError("Batch request should contain one or more sub requests."); + } + _a = createSpan("BlobBatchClient-submitBatch", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 4, 5, 6]); + batchRequestBody = batchRequest.getHttpRequestBody(); + return [4 /*yield*/, this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, tslib.__assign(tslib.__assign({}, options), convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + rawBatchResponse = _b.sent(); + batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + return [4 /*yield*/, batchResponseParser.parseBatchResponse()]; + case 3: + responseSummary = _b.sent(); + res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount + }; + return [2 /*return*/, res]; + case 4: + e_1 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_1.message + }); + throw e_1; + case 5: + span.end(); + return [7 /*endfinally*/]; + case 6: 
return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } -} + }); + }; + return BlobBatchClient; +}()); /** * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. */ -class ContainerClient extends StorageClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { - let pipeline; - let url; +var ContainerClient = /** @class */ (function (_super) { + tslib.__extends(ContainerClient, _super); + function ContainerClient(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { + var _this = this; + var pipeline; + var url; options = options || {}; if (credentialOrPipelineOrContainerName instanceof Pipeline) { // (url: string, pipeline: Pipeline) @@ -36761,11 +39598,11 @@ class ContainerClient extends StorageClient { else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + var containerName = credentialOrPipelineOrContainerName; + var extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); @@ -36785,16 +39622,21 @@ class ContainerClient extends StorageClient { else { throw new Error("Expecting non-empty strings for containerName parameter"); } - super(url, pipeline); - this._containerName = this.getContainerNameFromUrl(); - this.containerContext = new Container(this.storageClientContext); - } - /** - * The name of the container. - */ - get containerName() { - return this._containerName; + _this = _super.call(this, url, pipeline) || this; + _this._containerName = _this.getContainerNameFromUrl(); + _this.containerContext = new Container(_this.storageClientContext); + return _this; } + Object.defineProperty(ContainerClient.prototype, "containerName", { + /** + * The name of the container. + */ + get: function () { + return this._containerName; + }, + enumerable: false, + configurable: true + }); /** * Creates a new container under the specified account. If the container with * the same name already exists, the operation fails. 
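Stepping back from the down-leveled output, the BlobBatchClient above is normally driven through the public @azure/storage-blob surface. Below is a minimal usage sketch (account name, key, container, and blob URLs are placeholders) exercising both the deleteBlobs convenience helper and the explicit createBatch/submitBatch path; note that preAddSubRequest caps a batch at BATCH_MAX_REQUEST sub-requests, and a single batch may only contain one kind of operation (all deletes or all tier changes).

import { BlobServiceClient, StorageSharedKeyCredential } from "@azure/storage-blob";

async function batchDelete(): Promise<void> {
  const credential = new StorageSharedKeyCredential("myaccount", "<account-key>");
  const service = new BlobServiceClient("https://myaccount.blob.core.windows.net", credential);
  const batchClient = service.getBlobBatchClient();

  const urls = [
    "https://myaccount.blob.core.windows.net/mycontainer/blob-a",
    "https://myaccount.blob.core.windows.net/mycontainer/blob-b"
  ];

  // Convenience helper: a single HTTP request that deletes several blobs.
  const result = await batchClient.deleteBlobs(urls, credential);
  console.log(`${result.subResponsesSucceededCount} succeeded, ${result.subResponsesFailedCount} failed`);

  // Explicit form: assemble a BlobBatch, then submit it and inspect each sub-response.
  const batch = batchClient.createBatch();
  for (const url of urls) {
    await batch.deleteBlob(url, credential);
  }
  const submitted = await batchClient.submitBatch(batch);
  for (const sub of submitted.subResponses) {
    console.log(sub.status, sub.errorCode ?? "ok");
  }
}

batchDelete().catch(console.error);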
@@ -36811,24 +39653,37 @@ class ContainerClient extends StorageClient { * console.log("Container was created successfully", createContainerResponse.requestId); * ``` */ - async create(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-create", options); - try { - // Spread operator in destructuring assignments, - // this will filter out unwanted properties from the response object into result object - return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.create = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_1; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("ContainerClient-create", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.containerContext.create(tslib.__assign(tslib.__assign({}, options), convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + // Spread operator in destructuring assignments, + // this will filter out unwanted properties from the response object into result object + return [2 /*return*/, _b.sent()]; + case 3: + e_1 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_1.message + }); + throw e_1; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Creates a new container under the specified account. If the container with * the same name already exists, it is not changed. @@ -36836,32 +39691,45 @@ class ContainerClient extends StorageClient { * * @param options - */ - async createIfNotExists(options = {}) { + ContainerClient.prototype.createIfNotExists = function (options) { var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); - try { - const res = await this.create(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a container only if it does not already exist." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _c, span, updatedOptions, res, e_2; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _c = createSpan("ContainerClient-createIfNotExists", options), span = _c.span, updatedOptions = _c.updatedOptions; + _d.label = 1; + case 1: + _d.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.create(updatedOptions)]; + case 2: + res = _d.sent(); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable + })]; + case 3: + e_2 = _d.sent(); + if (((_a = e_2.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a container only if it does not already exist." + }); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_2.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_2.response })]; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_2.message + }); + throw e_2; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns true if the Azure container resource represented by this client exists; false otherwise. * @@ -36871,50 +39739,63 @@ class ContainerClient extends StorageClient { * * @param options - */ - async exists(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-exists", options); - try { - await this.getProperties({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - }); - return true; - } - catch (e) { - if (e.statusCode === 404) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking container existence" - }); - return false; - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.exists = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_3; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("ContainerClient-exists", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + })]; + case 2: + _b.sent(); + return [2 /*return*/, true]; + case 3: + e_3 = _b.sent(); + if (e_3.statusCode === 404) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when checking container existence" + }); + return [2 /*return*/, false]; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_3.message + }); + throw e_3; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Creates a {@link BlobClient} * * @param blobName - A blob name * @returns A new BlobClient object for the given blob name. 
*/ - getBlobClient(blobName) { + ContainerClient.prototype.getBlobClient = function (blobName) { return new BlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); - } + }; /** * Creates an {@link AppendBlobClient} * * @param blobName - An append blob name */ - getAppendBlobClient(blobName) { + ContainerClient.prototype.getAppendBlobClient = function (blobName) { return new AppendBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); - } + }; /** * Creates a {@link BlockBlobClient} * @@ -36930,17 +39811,17 @@ class ContainerClient extends StorageClient { * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ - getBlockBlobClient(blobName) { + ContainerClient.prototype.getBlockBlobClient = function (blobName) { return new BlockBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); - } + }; /** * Creates a {@link PageBlobClient} * * @param blobName - A page blob name */ - getPageBlobClient(blobName) { + ContainerClient.prototype.getPageBlobClient = function (blobName) { return new PageBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); - } + }; /** * Returns all user-defined metadata and system properties for the specified * container. The data returned does not include the container's list of blobs. @@ -36953,25 +39834,37 @@ class ContainerClient extends StorageClient { * * @param options - Options to Container Get Properties operation. */ - async getProperties(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); - try { - return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.getProperties = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_4; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!options.conditions) { + options.conditions = {}; + } + _a = createSpan("ContainerClient-getProperties", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.containerContext.getProperties(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_4 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_4.message + }); + throw e_4; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Marks the specified container for deletion. The container and any blobs * contained within it are later deleted during garbage collection. @@ -36979,25 +39872,37 @@ class ContainerClient extends StorageClient { * * @param options - Options to Container Delete operation. 
*/ - async delete(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("ContainerClient-delete", options); - try { - return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.delete = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_5; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!options.conditions) { + options.conditions = {}; + } + _a = createSpan("ContainerClient-delete", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.containerContext.delete(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_5 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_5.message + }); + throw e_5; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Marks the specified container for deletion if it exists. The container and any blobs * contained within it are later deleted during garbage collection. @@ -37005,32 +39910,45 @@ class ContainerClient extends StorageClient { * * @param options - Options to Container Delete operation. */ - async deleteIfExists(options = {}) { + ContainerClient.prototype.deleteIfExists = function (options) { var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); - try { - const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a container only if it exists." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _c, span, updatedOptions, res, e_6; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _c = createSpan("ContainerClient-deleteIfExists", options), span = _c.span, updatedOptions = _c.updatedOptions; + _d.label = 1; + case 1: + _d.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.delete(updatedOptions)]; + case 2: + res = _d.sent(); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable + })]; + case 3: + e_6 = _d.sent(); + if (((_a = e_6.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === "ContainerNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a container only if it exists." + }); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_6.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_6.response })]; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_6.message + }); + throw e_6; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets one or more user-defined name-value pairs for the specified container. * @@ -37043,28 +39961,40 @@ class ContainerClient extends StorageClient { * If no value provided the existing metadata will be removed. * @param options - Options to Container Set Metadata operation. */ - async setMetadata(metadata, options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - if (options.conditions.ifUnmodifiedSince) { - throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); - } - const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); - try { - return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.setMetadata = function (metadata, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_7; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!options.conditions) { + options.conditions = {}; + } + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + _a = createSpan("ContainerClient-setMetadata", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.containerContext.setMetadata(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_7 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_7.message + }); + throw e_7; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Gets the permissions for the specified container. The permissions indicate * whether container data may be accessed publicly. @@ -37076,56 +40006,70 @@ class ContainerClient extends StorageClient { * * @param options - Options to Container Get Access Policy operation. 
*/ - async getAccessPolicy(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); - try { - const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - const res = { - _response: response._response, - blobPublicAccess: response.blobPublicAccess, - date: response.date, - etag: response.etag, - errorCode: response.errorCode, - lastModified: response.lastModified, - requestId: response.requestId, - clientRequestId: response.clientRequestId, - signedIdentifiers: [], - version: response.version - }; - for (const identifier of response) { - let accessPolicy = undefined; - if (identifier.accessPolicy) { - accessPolicy = { - permissions: identifier.accessPolicy.permissions - }; - if (identifier.accessPolicy.expiresOn) { - accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); - } - if (identifier.accessPolicy.startsOn) { - accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); - } + ContainerClient.prototype.getAccessPolicy = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, response, res, _i, response_1, identifier, accessPolicy, e_8; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!options.conditions) { + options.conditions = {}; + } + _a = createSpan("ContainerClient-getAccessPolicy", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.containerContext.getAccessPolicy(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _b.sent(); + res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version + }; + for (_i = 0, response_1 = response; _i < response_1.length; _i++) { + identifier = response_1[_i]; + accessPolicy = undefined; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy: accessPolicy, + id: identifier.id + }); + } + return [2 /*return*/, res]; + case 3: + e_8 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_8.message + }); + throw e_8; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; } - res.signedIdentifiers.push({ - accessPolicy, - id: identifier.id - }); - } - return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets the permissions for the specified container. 
The permissions indicate * whether blobs in a container may be accessed publicly. @@ -37143,47 +40087,60 @@ class ContainerClient extends StorageClient { * @param containerAcl - Array of elements each having a unique Id and details of the access policy. * @param options - Options to Container Set Access Policy operation. */ - async setAccessPolicy(access, containerAcl, options = {}) { - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); - try { - const acl = []; - for (const identifier of containerAcl || []) { - acl.push({ - accessPolicy: { - expiresOn: identifier.accessPolicy.expiresOn - ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) - : "", - permissions: identifier.accessPolicy.permissions, - startsOn: identifier.accessPolicy.startsOn - ? truncatedISO8061Date(identifier.accessPolicy.startsOn) - : "" - }, - id: identifier.id - }); - } - return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.setAccessPolicy = function (access, containerAcl, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, acl, _i, _b, identifier, e_9; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _a = createSpan("ContainerClient-setAccessPolicy", options), span = _a.span, updatedOptions = _a.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + acl = []; + for (_i = 0, _b = containerAcl || []; _i < _b.length; _i++) { + identifier = _b[_i]; + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn + ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) + : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn + ? truncatedISO8061Date(identifier.accessPolicy.startsOn) + : "" + }, + id: identifier.id + }); + } + return [4 /*yield*/, this.containerContext.setAccessPolicy(tslib.__assign({ abortSignal: options.abortSignal, access: access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_9 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_9.message + }); + throw e_9; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Get a {@link BlobLeaseClient} that manages leases on the container. * * @param proposeLeaseId - Initial proposed lease Id. * @returns A new BlobLeaseClient object for managing leases on the container. */ - getBlobLeaseClient(proposeLeaseId) { + ContainerClient.prototype.getBlobLeaseClient = function (proposeLeaseId) { return new BlobLeaseClient(this, proposeLeaseId); - } + }; /** * Creates a new block blob, or updates the content of an existing block blob. 
* @@ -37206,27 +40163,40 @@ class ContainerClient extends StorageClient { * @param options - Options to configure the Block Blob Upload operation. * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. */ - async uploadBlockBlob(blobName, body, contentLength, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); - try { - const blockBlobClient = this.getBlockBlobClient(blobName); - const response = await blockBlobClient.upload(body, contentLength, updatedOptions); - return { - blockBlobClient, - response - }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.uploadBlockBlob = function (blobName, body, contentLength, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, blockBlobClient, response, e_10; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("ContainerClient-uploadBlockBlob", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + blockBlobClient = this.getBlockBlobClient(blobName); + return [4 /*yield*/, blockBlobClient.upload(body, contentLength, updatedOptions)]; + case 2: + response = _b.sent(); + return [2 /*return*/, { + blockBlobClient: blockBlobClient, + response: response + }]; + case 3: + e_10 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_10.message + }); + throw e_10; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Marks the specified blob or snapshot for deletion. The blob is later deleted * during garbage collection. Note that in order to delete a blob, you must delete @@ -37238,26 +40208,38 @@ class ContainerClient extends StorageClient { * @param options - Options to Blob Delete operation. * @returns Block blob deletion response data. 
*/ - async deleteBlob(blobName, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); - try { - let blobClient = this.getBlobClient(blobName); - if (options.versionId) { - blobClient = blobClient.withVersion(options.versionId); - } - return await blobClient.delete(updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.deleteBlob = function (blobName, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, blobClient, e_11; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("ContainerClient-deleteBlob", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + blobClient = this.getBlobClient(blobName); + if (options.versionId) { + blobClient = blobClient.withVersion(options.versionId); + } + return [4 /*yield*/, blobClient.delete(updatedOptions)]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_11 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_11.message + }); + throw e_11; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * listBlobFlatSegment returns a single segment of blobs starting from the * specified Marker. Use an empty Marker to start enumeration from the beginning. @@ -37268,27 +40250,40 @@ class ContainerClient extends StorageClient { * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to Container List Blob Flat Segment operation. 
*/ - async listBlobFlatSegment(marker, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); - try { - const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); - return blobItem; - }) }) }); - return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.listBlobFlatSegment = function (marker, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, response, wrappedResponse, e_12; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("ContainerClient-listBlobFlatSegment", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.containerContext.listBlobFlatSegment(tslib.__assign(tslib.__assign({ marker: marker }, options), convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _b.sent(); + wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, segment: tslib.__assign(tslib.__assign({}, response.segment), { blobItems: response.segment.blobItems.map(function (blobItemInteral) { + var blobItem = tslib.__assign(tslib.__assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return [2 /*return*/, wrappedResponse]; + case 3: + e_12 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_12.message + }); + throw e_12; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * listBlobHierarchySegment returns a single segment of blobs starting from * the specified Marker. Use an empty Marker to start enumeration from the @@ -37300,27 +40295,40 @@ class ContainerClient extends StorageClient { * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to Container List Blob Hierarchy Segment operation. 
*/ - async listBlobHierarchySegment(delimiter, marker, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); - try { - const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); - return blobItem; - }) }) }); - return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.listBlobHierarchySegment = function (delimiter, marker, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, response, wrappedResponse, e_13; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("ContainerClient-listBlobHierarchySegment", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.containerContext.listBlobHierarchySegment(delimiter, tslib.__assign(tslib.__assign({ marker: marker }, options), convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _b.sent(); + wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, segment: tslib.__assign(tslib.__assign({}, response.segment), { blobItems: response.segment.blobItems.map(function (blobItemInteral) { + var blobItem = tslib.__assign(tslib.__assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return [2 /*return*/, wrappedResponse]; + case 3: + e_13 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_13.message + }); + throw e_13; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse * @@ -37333,42 +40341,81 @@ class ContainerClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. 
*/ - listSegments(marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { - let listBlobsFlatSegmentResponse; - if (!!marker || marker === undefined) { - do { - listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker, options)); - marker = listBlobsFlatSegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); - } while (marker); - } + ContainerClient.prototype.listSegments = function (marker, options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function listSegments_1() { + var listBlobsFlatSegmentResponse; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!(!!marker || marker === undefined)) return [3 /*break*/, 7]; + _a.label = 1; + case 1: return [4 /*yield*/, tslib.__await(this.listBlobFlatSegment(marker, options))]; + case 2: + listBlobsFlatSegmentResponse = _a.sent(); + marker = listBlobsFlatSegmentResponse.continuationToken; + return [4 /*yield*/, tslib.__await(listBlobsFlatSegmentResponse)]; + case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])]; + case 4: return [4 /*yield*/, _a.sent()]; + case 5: + _a.sent(); + _a.label = 6; + case 6: + if (marker) return [3 /*break*/, 1]; + _a.label = 7; + case 7: return [2 /*return*/]; + } + }); }); - } + }; /** * Returns an AsyncIterableIterator of {@link BlobItem} objects * * @param options - Options to list blobs operation. */ - listItems(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItems_1() { - var e_1, _a; - let marker; - try { - for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const listBlobsFlatSegmentResponse = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); + ContainerClient.prototype.listItems = function (options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function listItems_1() { + var marker, _a, _b, listBlobsFlatSegmentResponse, e_14_1; + var e_14, _c; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _d.trys.push([0, 7, 8, 13]); + _a = tslib.__asyncValues(this.listSegments(marker, options)); + _d.label = 1; + case 1: return [4 /*yield*/, tslib.__await(_a.next())]; + case 2: + if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6]; + listBlobsFlatSegmentResponse = _b.value; + return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems)))]; + case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])]; + case 4: + _d.sent(); + _d.label = 5; + case 5: return [3 /*break*/, 1]; + case 6: return [3 /*break*/, 13]; + case 7: + e_14_1 = _d.sent(); + e_14 = { error: e_14_1 }; + return [3 /*break*/, 13]; + case 8: + _d.trys.push([8, , 11, 12]); + if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10]; + return [4 /*yield*/, tslib.__await(_c.call(_a))]; + case 9: + _d.sent(); + _d.label = 10; + case 10: return [3 /*break*/, 12]; + case 11: + if (e_14) throw e_14.error; + return [7 /*endfinally*/]; + case 12: return [7 /*endfinally*/]; + case 13: return [2 /*return*/]; } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); - } - finally { if (e_1) throw 
e_1.error; } - } + }); }); - } + }; /** * Returns an async iterable iterator to list all the blobs * under the specified account. @@ -37439,8 +40486,11 @@ class ContainerClient extends StorageClient { * @param options - Options to list blobs. * @returns An asyncIterableIterator that supports paging. */ - listBlobsFlat(options = {}) { - const include = []; + ContainerClient.prototype.listBlobsFlat = function (options) { + var _a; + var _this = this; + if (options === void 0) { options = {}; } + var include = []; if (options.includeCopy) { include.push("copy"); } @@ -37465,30 +40515,32 @@ class ContainerClient extends StorageClient { if (options.prefix === "") { options.prefix = undefined; } - const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include: include } : {})); + var updatedOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? { include: include } : {})); // AsyncIterableIterator to iterate over blobs - const iter = this.listItems(updatedOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); + var iter = this.listItems(updatedOptions); + return _a = { + /** + * The next method, part of the iteration protocol + */ + next: function () { + return iter.next(); + } }, /** * The connection to the async iterator, part of the iteration protocol */ - [Symbol.asyncIterator]() { + _a[Symbol.asyncIterator] = function () { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ - byPage: (settings = {}) => { - return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); - } - }; - } + _a.byPage = function (settings) { + if (settings === void 0) { settings = {}; } + return _this.listSegments(settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + _a; + }; /** * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse * @@ -37502,51 +40554,106 @@ class ContainerClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. 
*/ - listHierarchySegments(delimiter, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1() { - let listBlobsHierarchySegmentResponse; - if (!!marker || marker === undefined) { - do { - listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options)); - marker = listBlobsHierarchySegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); - } while (marker); - } + ContainerClient.prototype.listHierarchySegments = function (delimiter, marker, options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function listHierarchySegments_1() { + var listBlobsHierarchySegmentResponse; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!(!!marker || marker === undefined)) return [3 /*break*/, 7]; + _a.label = 1; + case 1: return [4 /*yield*/, tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options))]; + case 2: + listBlobsHierarchySegmentResponse = _a.sent(); + marker = listBlobsHierarchySegmentResponse.continuationToken; + return [4 /*yield*/, tslib.__await(listBlobsHierarchySegmentResponse)]; + case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])]; + case 4: return [4 /*yield*/, _a.sent()]; + case 5: + _a.sent(); + _a.label = 6; + case 6: + if (marker) return [3 /*break*/, 1]; + _a.label = 7; + case 7: return [2 /*return*/]; + } + }); }); - } + }; /** * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. * * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. */ - listItemsByHierarchy(delimiter, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1() { - var e_2, _a; - let marker; - try { - for (var _b = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const listBlobsHierarchySegmentResponse = _c.value; - const segment = listBlobsHierarchySegmentResponse.segment; - if (segment.blobPrefixes) { - for (const prefix of segment.blobPrefixes) { - yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix)); - } - } - for (const blob of segment.blobItems) { - yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); - } - } - } - catch (e_2_1) { e_2 = { error: e_2_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + ContainerClient.prototype.listItemsByHierarchy = function (delimiter, options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function listItemsByHierarchy_1() { + var marker, _a, _b, listBlobsHierarchySegmentResponse, segment, _i, _c, prefix, _d, _e, blob, e_15_1; + var e_15, _f; + return tslib.__generator(this, function (_g) { + switch (_g.label) { + case 0: + _g.trys.push([0, 14, 15, 20]); + _a = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)); + _g.label = 1; + case 1: return [4 /*yield*/, tslib.__await(_a.next())]; + case 2: + if (!(_b = _g.sent(), !_b.done)) return [3 /*break*/, 13]; + listBlobsHierarchySegmentResponse = _b.value; + segment = listBlobsHierarchySegmentResponse.segment; + if (!segment.blobPrefixes) return [3 /*break*/, 7]; + _i = 0, _c = segment.blobPrefixes; + _g.label = 3; + case 3: + if (!(_i < _c.length)) return [3 
/*break*/, 7]; + prefix = _c[_i]; + return [4 /*yield*/, tslib.__await(tslib.__assign({ kind: "prefix" }, prefix))]; + case 4: return [4 /*yield*/, _g.sent()]; + case 5: + _g.sent(); + _g.label = 6; + case 6: + _i++; + return [3 /*break*/, 3]; + case 7: + _d = 0, _e = segment.blobItems; + _g.label = 8; + case 8: + if (!(_d < _e.length)) return [3 /*break*/, 12]; + blob = _e[_d]; + return [4 /*yield*/, tslib.__await(tslib.__assign({ kind: "blob" }, blob))]; + case 9: return [4 /*yield*/, _g.sent()]; + case 10: + _g.sent(); + _g.label = 11; + case 11: + _d++; + return [3 /*break*/, 8]; + case 12: return [3 /*break*/, 1]; + case 13: return [3 /*break*/, 20]; + case 14: + e_15_1 = _g.sent(); + e_15 = { error: e_15_1 }; + return [3 /*break*/, 20]; + case 15: + _g.trys.push([15, , 18, 19]); + if (!(_b && !_b.done && (_f = _a.return))) return [3 /*break*/, 17]; + return [4 /*yield*/, tslib.__await(_f.call(_a))]; + case 16: + _g.sent(); + _g.label = 17; + case 17: return [3 /*break*/, 19]; + case 18: + if (e_15) throw e_15.error; + return [7 /*endfinally*/]; + case 19: return [7 /*endfinally*/]; + case 20: return [2 /*return*/]; } - finally { if (e_2) throw e_2.error; } - } + }); }); - } + }; /** * Returns an async iterable iterator to list all the blobs by hierarchy. * under the specified account. @@ -37623,11 +40730,14 @@ class ContainerClient extends StorageClient { * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. */ - listBlobsByHierarchy(delimiter, options = {}) { + ContainerClient.prototype.listBlobsByHierarchy = function (delimiter, options) { + var _a; + var _this = this; + if (options === void 0) { options = {}; } if (delimiter === "") { throw new RangeError("delimiter should contain one or more characters"); } - const include = []; + var include = []; if (options.includeCopy) { include.push("copy"); } @@ -37652,39 +40762,45 @@ class ContainerClient extends StorageClient { if (options.prefix === "") { options.prefix = undefined; } - const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include: include } : {})); + var updatedOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? 
{ include: include } : {})); // AsyncIterableIterator to iterate over blob prefixes and blobs - const iter = this.listItemsByHierarchy(delimiter, updatedOptions); - return { - /** - * The next method, part of the iteration protocol - */ - async next() { - return iter.next(); + var iter = this.listItemsByHierarchy(delimiter, updatedOptions); + return _a = { + /** + * The next method, part of the iteration protocol + */ + next: function () { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, iter.next()]; + }); + }); + } }, /** * The connection to the async iterator, part of the iteration protocol */ - [Symbol.asyncIterator]() { + _a[Symbol.asyncIterator] = function () { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ - byPage: (settings = {}) => { - return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); - } - }; - } - getContainerNameFromUrl() { - let containerName; + _a.byPage = function (settings) { + if (settings === void 0) { settings = {}; } + return _this.listHierarchySegments(delimiter, settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + _a; + }; + ContainerClient.prototype.getContainerNameFromUrl = function () { + var containerName; try { // URL may look like the following // "https://myaccount.blob.core.windows.net/mycontainer?sasString"; // "https://myaccount.blob.core.windows.net/mycontainer"; // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` // http://localhost:10001/devstoreaccount1/containername - const parsedUrl = coreHttp.URLBuilder.parse(this.url); + var parsedUrl = coreHttp.URLBuilder.parse(this.url); if (parsedUrl.getHost().split(".")[1] === "blob") { // "https://myaccount.blob.core.windows.net/containername". // "https://customdomain.com/containername". @@ -37712,7 +40828,7 @@ class ContainerClient extends StorageClient { catch (error) { throw new Error("Unable to extract containerName with provided information."); } - } + }; /** * Only available for ContainerClient constructed with a shared key credential. * @@ -37724,15 +40840,16 @@ class ContainerClient extends StorageClient { * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - generateSasUrl(options) { - return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + ContainerClient.prototype.generateSasUrl = function (options) { + var _this = this; + return new Promise(function (resolve) { + if (!(_this.credential instanceof StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); + var sas = generateBlobSASQueryParameters(tslib.__assign({ containerName: _this._containerName }, options), _this.credential).toString(); + resolve(appendToURLQuery(_this.url, sas)); }); - } + }; /** * Creates a BlobBatchClient object to conduct batch operations. * @@ -37740,10 +40857,11 @@ class ContainerClient extends StorageClient { * * @returns A new BlobBatchClient object for this container. 
*/ - getBlobBatchClient() { + ContainerClient.prototype.getBlobBatchClient = function () { return new BlobBatchClient(this.url, this.pipeline); - } -} + }; + return ContainerClient; +}(StorageClient)); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -37756,8 +40874,8 @@ class ContainerClient extends StorageClient { * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but * the order of the permissions is particular and this class guarantees correctness. */ -class AccountSASPermissions { - constructor() { +var AccountSASPermissions = /** @class */ (function () { + function AccountSASPermissions() { /** * Permission to read resources and list queues and tables granted. */ @@ -37808,9 +40926,10 @@ class AccountSASPermissions { * * @param permissions - */ - static parse(permissions) { - const accountSASPermissions = new AccountSASPermissions(); - for (const c of permissions) { + AccountSASPermissions.parse = function (permissions) { + var accountSASPermissions = new AccountSASPermissions(); + for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) { + var c = permissions_1[_i]; switch (c) { case "r": accountSASPermissions.read = true; @@ -37846,19 +40965,19 @@ class AccountSASPermissions { accountSASPermissions.filter = true; break; default: - throw new RangeError(`Invalid permission character: ${c}`); + throw new RangeError("Invalid permission character: " + c); } } return accountSASPermissions; - } + }; /** * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it * and boolean values for them. * * @param permissionLike - */ - static from(permissionLike) { - const accountSASPermissions = new AccountSASPermissions(); + AccountSASPermissions.from = function (permissionLike) { + var accountSASPermissions = new AccountSASPermissions(); if (permissionLike.read) { accountSASPermissions.read = true; } @@ -37893,7 +41012,7 @@ class AccountSASPermissions { accountSASPermissions.process = true; } return accountSASPermissions; - } + }; /** * Produces the SAS permissions string for an Azure Storage account. * Call this method to set AccountSASSignatureValues Permissions field. @@ -37904,11 +41023,11 @@ class AccountSASPermissions { * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas * */ - toString() { + AccountSASPermissions.prototype.toString = function () { // The order of the characters should be as specified here to ensure correctness: // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas // Use a string array instead of string concatenating += operator for performance - const permissions = []; + var permissions = []; if (this.read) { permissions.push("r"); } @@ -37943,8 +41062,9 @@ class AccountSASPermissions { permissions.push("p"); } return permissions.join(""); - } -} + }; + return AccountSASPermissions; +}()); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -37957,8 +41077,8 @@ class AccountSASPermissions { * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but * the order of the resources is particular and this class guarantees correctness. */ -class AccountSASResourceTypes { - constructor() { +var AccountSASResourceTypes = /** @class */ (function () { + function AccountSASResourceTypes() { /** * Permission to access service level APIs granted. 
*/ @@ -37978,9 +41098,10 @@ class AccountSASResourceTypes { * * @param resourceTypes - */ - static parse(resourceTypes) { - const accountSASResourceTypes = new AccountSASResourceTypes(); - for (const c of resourceTypes) { + AccountSASResourceTypes.parse = function (resourceTypes) { + var accountSASResourceTypes = new AccountSASResourceTypes(); + for (var _i = 0, resourceTypes_1 = resourceTypes; _i < resourceTypes_1.length; _i++) { + var c = resourceTypes_1[_i]; switch (c) { case "s": accountSASResourceTypes.service = true; @@ -37992,19 +41113,19 @@ class AccountSASResourceTypes { accountSASResourceTypes.object = true; break; default: - throw new RangeError(`Invalid resource type: ${c}`); + throw new RangeError("Invalid resource type: " + c); } } return accountSASResourceTypes; - } + }; /** * Converts the given resource types to a string. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas * */ - toString() { - const resourceTypes = []; + AccountSASResourceTypes.prototype.toString = function () { + var resourceTypes = []; if (this.service) { resourceTypes.push("s"); } @@ -38015,8 +41136,9 @@ class AccountSASResourceTypes { resourceTypes.push("o"); } return resourceTypes.join(""); - } -} + }; + return AccountSASResourceTypes; +}()); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -38029,8 +41151,8 @@ class AccountSASResourceTypes { * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but * the order of the services is particular and this class guarantees correctness. */ -class AccountSASServices { - constructor() { +var AccountSASServices = /** @class */ (function () { + function AccountSASServices() { /** * Permission to access blob resources granted. */ @@ -38054,9 +41176,10 @@ class AccountSASServices { * * @param services - */ - static parse(services) { - const accountSASServices = new AccountSASServices(); - for (const c of services) { + AccountSASServices.parse = function (services) { + var accountSASServices = new AccountSASServices(); + for (var _i = 0, services_1 = services; _i < services_1.length; _i++) { + var c = services_1[_i]; switch (c) { case "b": accountSASServices.blob = true; @@ -38071,17 +41194,17 @@ class AccountSASServices { accountSASServices.table = true; break; default: - throw new RangeError(`Invalid service character: ${c}`); + throw new RangeError("Invalid service character: " + c); } } return accountSASServices; - } + }; /** * Converts the given services to a string. * */ - toString() { - const services = []; + AccountSASServices.prototype.toString = function () { + var services = []; if (this.blob) { services.push("b"); } @@ -38095,8 +41218,9 @@ class AccountSASServices { services.push("f"); } return services.join(""); - } -} + }; + return AccountSASServices; +}()); // Copyright (c) Microsoft Corporation. /** @@ -38111,7 +41235,7 @@ class AccountSASServices { * @param sharedKeyCredential - */ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { - const version = accountSASSignatureValues.version + var version = accountSASSignatureValues.version ? 
accountSASSignatureValues.version : SERVICE_VERSION; if (accountSASSignatureValues.permissions && @@ -38129,10 +41253,10 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC version < "2019-12-12") { throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); } - const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); - const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); - const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); - const stringToSign = [ + var parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + var parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + var parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + var stringToSign = [ sharedKeyCredential.accountName, parsedPermissions, parsedServices, @@ -38146,7 +41270,7 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC version, "" // Account SAS requires an additional newline character ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + var signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange); } @@ -38154,9 +41278,11 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you * to manipulate blob containers. */ -class BlobServiceClient extends StorageClient { - constructor(url, credentialOrPipeline, options) { - let pipeline; +var BlobServiceClient = /** @class */ (function (_super) { + tslib.__extends(BlobServiceClient, _super); + function BlobServiceClient(url, credentialOrPipeline, options) { + var _this = this; + var pipeline; if (credentialOrPipeline instanceof Pipeline) { pipeline = credentialOrPipeline; } @@ -38169,8 +41295,9 @@ class BlobServiceClient extends StorageClient { // The second parameter is undefined. Use anonymous credential pipeline = newPipeline(new AnonymousCredential(), options); } - super(url, pipeline); - this.serviceContext = new Service(this.storageClientContext); + _this = _super.call(this, url, pipeline) || this; + _this.serviceContext = new Service(_this.storageClientContext); + return _this; } /** * @@ -38184,25 +41311,25 @@ class BlobServiceClient extends StorageClient { * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` * @param options - Optional. Options to configure the HTTP pipeline. 
*/ - static fromConnectionString(connectionString, options) { + BlobServiceClient.fromConnectionString = function (connectionString, options) { options = options || {}; - const extractedCreds = extractConnectionStringParts(connectionString); + var extractedCreds = extractConnectionStringParts(connectionString); if (extractedCreds.kind === "AccountConnString") { { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - const pipeline = newPipeline(sharedKeyCredential, options); + var pipeline = newPipeline(sharedKeyCredential, options); return new BlobServiceClient(extractedCreds.url, pipeline); } } else if (extractedCreds.kind === "SASConnString") { - const pipeline = newPipeline(new AnonymousCredential(), options); + var pipeline = newPipeline(new AnonymousCredential(), options); return new BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } - } + }; /** * Creates a {@link ContainerClient} object * @@ -38215,9 +41342,9 @@ class BlobServiceClient extends StorageClient { * const containerClient = blobServiceClient.getContainerClient(""); * ``` */ - getContainerClient(containerName) { + BlobServiceClient.prototype.getContainerClient = function (containerName) { return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); - } + }; /** * Create a Blob container. * @@ -38225,27 +41352,40 @@ class BlobServiceClient extends StorageClient { * @param options - Options to configure Container Create operation. * @returns Container creation response and the corresponding container client. */ - async createContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); - try { - const containerClient = this.getContainerClient(containerName); - const containerCreateResponse = await containerClient.create(updatedOptions); - return { - containerClient, - containerCreateResponse - }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.createContainer = function (containerName, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, containerClient, containerCreateResponse, e_1; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-createContainer", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + containerClient = this.getContainerClient(containerName); + return [4 /*yield*/, containerClient.create(updatedOptions)]; + case 2: + containerCreateResponse = _b.sent(); + return [2 /*return*/, { + containerClient: containerClient, + containerCreateResponse: containerCreateResponse + }]; + case 3: + e_1 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_1.message + }); + throw e_1; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Deletes a Blob container. 
* @@ -38253,23 +41393,35 @@ class BlobServiceClient extends StorageClient { * @param options - Options to configure Container Delete operation. * @returns Container deletion response. */ - async deleteContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); - try { - const containerClient = this.getContainerClient(containerName); - return await containerClient.delete(updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.deleteContainer = function (containerName, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, containerClient, e_2; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-deleteContainer", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + containerClient = this.getContainerClient(containerName); + return [4 /*yield*/, containerClient.delete(updatedOptions)]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_2 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_2.message + }); + throw e_2; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Restore a previously deleted Blob container. * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. @@ -38279,27 +41431,39 @@ class BlobServiceClient extends StorageClient { * @param options - Options to configure Container Restore operation. * @returns Container deletion response. */ - async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); - try { - const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); - // Hack to access a protected member. 
- const containerContext = new Container(containerClient["storageClientContext"]); - const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName, - deletedContainerVersion }, updatedOptions)); - return { containerClient, containerUndeleteResponse }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.undeleteContainer = function (deletedContainerName, deletedContainerVersion, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, containerClient, containerContext, containerUndeleteResponse, e_3; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-undeleteContainer", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); + containerContext = new Container(containerClient["storageClientContext"]); + return [4 /*yield*/, containerContext.restore(tslib.__assign({ deletedContainerName: deletedContainerName, + deletedContainerVersion: deletedContainerVersion }, updatedOptions))]; + case 2: + containerUndeleteResponse = _b.sent(); + return [2 /*return*/, { containerClient: containerClient, containerUndeleteResponse: containerUndeleteResponse }]; + case 3: + e_3 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_3.message + }); + throw e_3; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Rename an existing Blob Container. * @@ -38308,27 +41472,39 @@ class BlobServiceClient extends StorageClient { * @param options - Options to configure Container Rename operation. */ // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. - async renameContainer(sourceContainerName, destinationContainerName, options = {}) { + BlobServiceClient.prototype.renameContainer = function (sourceContainerName, destinationContainerName, options) { var _a; - const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); - try { - const containerClient = this.getContainerClient(destinationContainerName); - // Hack to access a protected member. - const containerContext = new Container(containerClient["storageClientContext"]); - const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? 
void 0 : _a.leaseId })); - return { containerClient, containerRenameResponse }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, containerClient, containerContext, containerRenameResponse, e_4; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobServiceClient-renameContainer", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + containerClient = this.getContainerClient(destinationContainerName); + containerContext = new Container(containerClient["storageClientContext"]); + return [4 /*yield*/, containerContext.rename(sourceContainerName, tslib.__assign(tslib.__assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId }))]; + case 2: + containerRenameResponse = _c.sent(); + return [2 /*return*/, { containerClient: containerClient, containerRenameResponse: containerRenameResponse }]; + case 3: + e_4 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_4.message + }); + throw e_4; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Gets the properties of a storage account’s Blob service, including properties * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -38337,22 +41513,34 @@ class BlobServiceClient extends StorageClient { * @param options - Options to the Service Get Properties operation. * @returns Response data for the Service Get Properties operation. */ - async getProperties(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); - try { - return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.getProperties = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_5; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-getProperties", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.serviceContext.getProperties(tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_5 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_5.message + }); + throw e_5; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets properties for a storage account’s Blob service endpoint, including properties * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. @@ -38362,22 +41550,34 @@ class BlobServiceClient extends StorageClient { * @param options - Options to the Service Set Properties operation. 
* @returns Response data for the Service Set Properties operation. */ - async setProperties(properties, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); - try { - return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.setProperties = function (properties, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_6; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-setProperties", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.serviceContext.setProperties(properties, tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_6 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_6.message + }); + throw e_6; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Retrieves statistics related to replication for the Blob service. It is only * available on the secondary location endpoint when read-access geo-redundant @@ -38387,22 +41587,34 @@ class BlobServiceClient extends StorageClient { * @param options - Options to the Service Get Statistics operation. * @returns Response data for the Service Get Statistics operation. */ - async getStatistics(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); - try { - return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.getStatistics = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_7; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-getStatistics", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.serviceContext.getStatistics(tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_7 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_7.message + }); + throw e_7; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * The Get Account Information operation returns the sku name and account kind * for the specified account. @@ -38413,22 +41625,34 @@ class BlobServiceClient extends StorageClient { * @param options - Options to the Service Get Account Info operation. * @returns Response data for the Service Get Account Info operation. 
*/ - async getAccountInfo(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); - try { - return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.getAccountInfo = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_8; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-getAccountInfo", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.serviceContext.getAccountInfo(tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_8 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_8.message + }); + throw e_8; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns a list of the containers under the specified account. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 @@ -38443,22 +41667,34 @@ class BlobServiceClient extends StorageClient { * @param options - Options to the Service List Container Segment operation. * @returns Response data for the Service List Container Segment operation. */ - async listContainersSegment(marker, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); - try { - return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.listContainersSegment = function (marker, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_9; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-listContainersSegment", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.serviceContext.listContainersSegment(tslib.__assign(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal, marker: marker }, options), { include: typeof options.include === "string" ? 
[options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_9 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_9.message + }); + throw e_9; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags * match a given search expression. Filter blobs searches across all containers within a @@ -38477,31 +41713,44 @@ class BlobServiceClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); - try { - const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { - var _a; - let tagValue = ""; - if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { - tagValue = blob.tags.blobTagSet[0].value; - } - return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); - }) }); - return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.findBlobsByTagsSegment = function (tagFilterSqlExpression, marker, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, response, wrappedResponse, e_10; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-findBlobsByTagsSegment", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.serviceContext.filterBlobs(tslib.__assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker: marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _b.sent(); + wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, blobs: response.blobs.map(function (blob) { + var _a; + var tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return tslib.__assign(tslib.__assign({}, blob), { tags: toTags(blob.tags), tagValue: tagValue }); + }) }); + return [2 /*return*/, wrappedResponse]; + case 3: + e_10 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_10.message + }); + throw e_10; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. * @@ -38518,19 +41767,33 @@ class BlobServiceClient extends StorageClient { * items. The marker value is opaque to the client. 
* @param options - Options to find blobs by tags. */ - findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { - let response; - if (!!marker || marker === undefined) { - do { - response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); - response.blobs = response.blobs || []; - marker = response.continuationToken; - yield yield tslib.__await(response); - } while (marker); - } + BlobServiceClient.prototype.findBlobsByTagsSegments = function (tagFilterSqlExpression, marker, options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function findBlobsByTagsSegments_1() { + var response; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!(!!marker || marker === undefined)) return [3 /*break*/, 6]; + _a.label = 1; + case 1: return [4 /*yield*/, tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options))]; + case 2: + response = _a.sent(); + response.blobs = response.blobs || []; + marker = response.continuationToken; + return [4 /*yield*/, tslib.__await(response)]; + case 3: return [4 /*yield*/, _a.sent()]; + case 4: + _a.sent(); + _a.label = 5; + case 5: + if (marker) return [3 /*break*/, 1]; + _a.label = 6; + case 6: return [2 /*return*/]; + } + }); }); - } + }; /** * Returns an AsyncIterableIterator for blobs. * @@ -38540,25 +41803,49 @@ class BlobServiceClient extends StorageClient { * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. */ - findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { - var e_1, _a; - let marker; - try { - for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const segment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + BlobServiceClient.prototype.findBlobsByTagsItems = function (tagFilterSqlExpression, options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function findBlobsByTagsItems_1() { + var marker, _a, _b, segment, e_11_1; + var e_11, _c; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _d.trys.push([0, 7, 8, 13]); + _a = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)); + _d.label = 1; + case 1: return [4 /*yield*/, tslib.__await(_a.next())]; + case 2: + if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6]; + segment = _b.value; + return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs)))]; + case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])]; + case 4: + _d.sent(); + _d.label = 5; + case 5: return [3 /*break*/, 1]; + case 6: return [3 /*break*/, 13]; + case 7: + e_11_1 = _d.sent(); + e_11 = { error: e_11_1 }; + return [3 /*break*/, 13]; + case 8: + _d.trys.push([8, , 11, 12]); + if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10]; + return [4 /*yield*/, tslib.__await(_c.call(_a))]; + case 9: + _d.sent(); + _d.label = 10; + 
case 10: return [3 /*break*/, 12]; + case 11: + if (e_11) throw e_11.error; + return [7 /*endfinally*/]; + case 12: return [7 /*endfinally*/]; + case 13: return [2 /*return*/]; } - finally { if (e_1) throw e_1.error; } - } + }); }); - } + }; /** * Returns an async iterable iterator to find all blobs with specified tag * under the specified account. @@ -38638,31 +41925,36 @@ class BlobServiceClient extends StorageClient { * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to find blobs by tags. */ - findBlobsByTags(tagFilterSqlExpression, options = {}) { + BlobServiceClient.prototype.findBlobsByTags = function (tagFilterSqlExpression, options) { + var _a; + var _this = this; + if (options === void 0) { options = {}; } // AsyncIterableIterator to iterate over blobs - const listSegmentOptions = Object.assign({}, options); - const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); + var listSegmentOptions = tslib.__assign({}, options); + var iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return _a = { + /** + * The next method, part of the iteration protocol + */ + next: function () { + return iter.next(); + } }, /** * The connection to the async iterator, part of the iteration protocol */ - [Symbol.asyncIterator]() { + _a[Symbol.asyncIterator] = function () { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ - byPage: (settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - } - }; - } + _a.byPage = function (settings) { + if (settings === void 0) { settings = {}; } + return _this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + _a; + }; /** * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses * @@ -38675,44 +41967,83 @@ class BlobServiceClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list containers operation. 
*/ - listSegments(marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { - let listContainersSegmentResponse; - if (!!marker || marker === undefined) { - do { - listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker, options)); - listContainersSegmentResponse.containerItems = - listContainersSegmentResponse.containerItems || []; - marker = listContainersSegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); - } while (marker); - } + BlobServiceClient.prototype.listSegments = function (marker, options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function listSegments_1() { + var listContainersSegmentResponse; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!(!!marker || marker === undefined)) return [3 /*break*/, 7]; + _a.label = 1; + case 1: return [4 /*yield*/, tslib.__await(this.listContainersSegment(marker, options))]; + case 2: + listContainersSegmentResponse = _a.sent(); + listContainersSegmentResponse.containerItems = + listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + return [4 /*yield*/, tslib.__await(listContainersSegmentResponse)]; + case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])]; + case 4: return [4 /*yield*/, _a.sent()]; + case 5: + _a.sent(); + _a.label = 6; + case 6: + if (marker) return [3 /*break*/, 1]; + _a.label = 7; + case 7: return [2 /*return*/]; + } + }); }); - } + }; /** * Returns an AsyncIterableIterator for Container Items * * @param options - Options to list containers operation. */ - listItems(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItems_1() { - var e_2, _a; - let marker; - try { - for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const segment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); - } - } - catch (e_2_1) { e_2 = { error: e_2_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + BlobServiceClient.prototype.listItems = function (options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function listItems_1() { + var marker, _a, _b, segment, e_12_1; + var e_12, _c; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _d.trys.push([0, 7, 8, 13]); + _a = tslib.__asyncValues(this.listSegments(marker, options)); + _d.label = 1; + case 1: return [4 /*yield*/, tslib.__await(_a.next())]; + case 2: + if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6]; + segment = _b.value; + return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems)))]; + case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])]; + case 4: + _d.sent(); + _d.label = 5; + case 5: return [3 /*break*/, 1]; + case 6: return [3 /*break*/, 13]; + case 7: + e_12_1 = _d.sent(); + e_12 = { error: e_12_1 }; + return [3 /*break*/, 13]; + case 8: + _d.trys.push([8, , 11, 12]); + if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10]; + return [4 /*yield*/, tslib.__await(_c.call(_a))]; + case 9: + _d.sent(); + _d.label = 10; + case 10: return [3 /*break*/, 12]; + case 11: + if (e_12) throw e_12.error; + return [7 
/*endfinally*/]; + case 12: return [7 /*endfinally*/]; + case 13: return [2 /*return*/]; } - finally { if (e_2) throw e_2.error; } - } + }); }); - } + }; /** * Returns an async iterable iterator to list all the containers * under the specified account. @@ -38787,11 +42118,14 @@ class BlobServiceClient extends StorageClient { * @param options - Options to list containers. * @returns An asyncIterableIterator that supports paging. */ - listContainers(options = {}) { + BlobServiceClient.prototype.listContainers = function (options) { + var _a; + var _this = this; + if (options === void 0) { options = {}; } if (options.prefix === "") { options.prefix = undefined; } - const include = []; + var include = []; if (options.includeDeleted) { include.push("deleted"); } @@ -38799,29 +42133,31 @@ class BlobServiceClient extends StorageClient { include.push("metadata"); } // AsyncIterableIterator to iterate over containers - const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); - const iter = this.listItems(listSegmentOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); + var listSegmentOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? { include: include } : {})); + var iter = this.listItems(listSegmentOptions); + return _a = { + /** + * The next method, part of the iteration protocol + */ + next: function () { + return iter.next(); + } }, /** * The connection to the async iterator, part of the iteration protocol */ - [Symbol.asyncIterator]() { + _a[Symbol.asyncIterator] = function () { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ - byPage: (settings = {}) => { - return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - } - }; - } + _a.byPage = function (settings) { + if (settings === void 0) { settings = {}; } + return _this.listSegments(settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + _a; + }; /** * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). * @@ -38833,36 +42169,49 @@ class BlobServiceClient extends StorageClient { * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time * @param expiresOn - The end time for the user delegation SAS. 
Must be within 7 days of the current time */ - async getUserDelegationKey(startsOn, expiresOn, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); - try { - const response = await this.serviceContext.getUserDelegationKey({ - startsOn: truncatedISO8061Date(startsOn, false), - expiresOn: truncatedISO8061Date(expiresOn, false) - }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - const userDelegationKey = { - signedObjectId: response.signedObjectId, - signedTenantId: response.signedTenantId, - signedStartsOn: new Date(response.signedStartsOn), - signedExpiresOn: new Date(response.signedExpiresOn), - signedService: response.signedService, - signedVersion: response.signedVersion, - value: response.value - }; - const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); - return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.getUserDelegationKey = function (startsOn, expiresOn, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, response, userDelegationKey, res, e_13; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-getUserDelegationKey", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.serviceContext.getUserDelegationKey({ + startsOn: truncatedISO8061Date(startsOn, false), + expiresOn: truncatedISO8061Date(expiresOn, false) + }, tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _b.sent(); + userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value + }; + res = tslib.__assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + return [2 /*return*/, res]; + case 3: + e_13 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_13.message + }); + throw e_13; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Creates a BlobBatchClient object to conduct batch operations. * @@ -38870,9 +42219,9 @@ class BlobServiceClient extends StorageClient { * * @returns A new BlobBatchClient object for this service. */ - getBlobBatchClient() { + BlobServiceClient.prototype.getBlobBatchClient = function () { return new BlobBatchClient(this.url, this.pipeline); - } + }; /** * Only available for BlobServiceClient constructed with a shared key credential. * @@ -38887,20 +42236,24 @@ class BlobServiceClient extends StorageClient { * @param options - Optional parameters. 
* @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + BlobServiceClient.prototype.generateAccountSasUrl = function (expiresOn, permissions, resourceTypes, options) { + if (permissions === void 0) { permissions = AccountSASPermissions.parse("r"); } + if (resourceTypes === void 0) { resourceTypes = "sco"; } + if (options === void 0) { options = {}; } if (!(this.credential instanceof StorageSharedKeyCredential)) { throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); } if (expiresOn === undefined) { - const now = new Date(); + var now = new Date(); expiresOn = new Date(now.getTime() + 3600 * 1000); } - const sas = generateAccountSASQueryParameters(Object.assign({ permissions, - expiresOn, - resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); + var sas = generateAccountSASQueryParameters(tslib.__assign({ permissions: permissions, + expiresOn: expiresOn, + resourceTypes: resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); return appendToURLQuery(this.url, sas); - } -} + }; + return BlobServiceClient; +}(StorageClient)); Object.defineProperty(exports, 'BaseRequestPolicy', { enumerable: true, @@ -40828,7 +44181,6 @@ Object.defineProperty(exports, "__esModule", { value: true }); exports.ProxyTracerProvider = void 0; var ProxyTracer_1 = __webpack_require__(398); var NoopTracerProvider_1 = __webpack_require__(162); -var NOOP_TRACER_PROVIDER = new NoopTracerProvider_1.NoopTracerProvider(); /** * Tracer provider which provides {@link ProxyTracer}s. * @@ -40849,7 +44201,7 @@ var ProxyTracerProvider = /** @class */ (function () { }; ProxyTracerProvider.prototype.getDelegate = function () { var _a; - return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; + return (_a = this._delegate) !== null && _a !== void 0 ? _a : NoopTracerProvider_1.NOOP_TRACER_PROVIDER; }; /** * Set the delegate tracer provider @@ -40907,7 +44259,6 @@ module.exports = function (Yallist) { Object.defineProperty(exports, "__esModule", { value: true }); exports.ProxyTracer = void 0; var NoopTracer_1 = __webpack_require__(151); -var NOOP_TRACER = new NoopTracer_1.NoopTracer(); /** * Proxy tracer provided by the proxy tracer provider */ @@ -40920,10 +44271,6 @@ var ProxyTracer = /** @class */ (function () { ProxyTracer.prototype.startSpan = function (name, options, context) { return this._getTracer().startSpan(name, options, context); }; - ProxyTracer.prototype.startActiveSpan = function (_name, _options, _context, _fn) { - var tracer = this._getTracer(); - return Reflect.apply(tracer.startActiveSpan, tracer, arguments); - }; /** * Try to get a tracer from the proxy tracer provider. * If the proxy tracer provider has no delegate, return a noop tracer. @@ -40934,7 +44281,7 @@ var ProxyTracer = /** @class */ (function () { } var tracer = this._provider.getDelegateTracer(this.name, this.version); if (!tracer) { - return NOOP_TRACER; + return NoopTracer_1.NOOP_TRACER; } this._delegate = tracer; return this._delegate; @@ -41488,10 +44835,12 @@ function parallel(list, iterator, callback) * See the License for the specific language governing permissions and * limitations under the License. 
*/ -var __spreadArray = (this && this.__spreadArray) || function (to, from) { - for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) - to[j] = from[i]; - return to; +var __spreadArrays = (this && this.__spreadArrays) || function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.NoopContextManager = void 0; @@ -41507,9 +44856,9 @@ var NoopContextManager = /** @class */ (function () { for (var _i = 3; _i < arguments.length; _i++) { args[_i - 3] = arguments[_i]; } - return fn.call.apply(fn, __spreadArray([thisArg], args)); + return fn.call.apply(fn, __spreadArrays([thisArg], args)); }; - NoopContextManager.prototype.bind = function (_context, target) { + NoopContextManager.prototype.bind = function (target, _context) { return target; }; NoopContextManager.prototype.enable = function () { @@ -41788,77 +45137,7 @@ exports.listTar = listTar; /***/ }), /* 435 */, /* 436 */, -/* 437 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.NonRecordingSpan = void 0; -var spancontext_utils_1 = __webpack_require__(629); -/** - * The NonRecordingSpan is the default {@link Span} that is used when no Span - * implementation is available. All operations are no-op including context - * propagation. - */ -var NonRecordingSpan = /** @class */ (function () { - function NonRecordingSpan(_spanContext) { - if (_spanContext === void 0) { _spanContext = spancontext_utils_1.INVALID_SPAN_CONTEXT; } - this._spanContext = _spanContext; - } - // Returns a SpanContext. - NonRecordingSpan.prototype.spanContext = function () { - return this._spanContext; - }; - // By default does nothing - NonRecordingSpan.prototype.setAttribute = function (_key, _value) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.setAttributes = function (_attributes) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.addEvent = function (_name, _attributes) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.setStatus = function (_status) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.updateName = function (_name) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.end = function (_endTime) { }; - // isRecording always returns false for NonRecordingSpan. 
- NonRecordingSpan.prototype.isRecording = function () { - return false; - }; - // By default does nothing - NonRecordingSpan.prototype.recordException = function (_exception, _time) { }; - return NonRecordingSpan; -}()); -exports.NonRecordingSpan = NonRecordingSpan; -//# sourceMappingURL=NonRecordingSpan.js.map - -/***/ }), +/* 437 */, /* 438 */, /* 439 */, /* 440 */ @@ -41892,16 +45171,19 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) { for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.diag = exports.propagation = exports.trace = exports.context = exports.isValidSpanId = exports.isValidTraceId = exports.isSpanContextValid = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = exports.baggageEntryMetadataFromString = void 0; -__exportStar(__webpack_require__(880), exports); -var utils_1 = __webpack_require__(112); -Object.defineProperty(exports, "baggageEntryMetadataFromString", { enumerable: true, get: function () { return utils_1.baggageEntryMetadataFromString; } }); +exports.diag = exports.propagation = exports.trace = exports.context = exports.isValidSpanId = exports.isValidTraceId = exports.isSpanContextValid = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; +__exportStar(__webpack_require__(792), exports); __exportStar(__webpack_require__(452), exports); __exportStar(__webpack_require__(158), exports); __exportStar(__webpack_require__(893), exports); +__exportStar(__webpack_require__(918), exports); __exportStar(__webpack_require__(881), exports); __exportStar(__webpack_require__(906), exports); +__exportStar(__webpack_require__(727), exports); +__exportStar(__webpack_require__(851), exports); __exportStar(__webpack_require__(95), exports); +__exportStar(__webpack_require__(151), exports); +__exportStar(__webpack_require__(162), exports); __exportStar(__webpack_require__(398), exports); __exportStar(__webpack_require__(394), exports); __exportStar(__webpack_require__(781), exports); @@ -41911,6 +45193,7 @@ __exportStar(__webpack_require__(670), exports); __exportStar(__webpack_require__(59), exports); __exportStar(__webpack_require__(220), exports); __exportStar(__webpack_require__(932), exports); +__exportStar(__webpack_require__(839), exports); __exportStar(__webpack_require__(975), exports); __exportStar(__webpack_require__(70), exports); __exportStar(__webpack_require__(773), exports); @@ -41923,6 +45206,7 @@ Object.defineProperty(exports, "isSpanContextValid", { enumerable: true, get: fu Object.defineProperty(exports, "isValidTraceId", { enumerable: true, get: function () { return spancontext_utils_1.isValidTraceId; } }); Object.defineProperty(exports, "isValidSpanId", { enumerable: true, get: function () { return spancontext_utils_1.isValidSpanId; } }); __exportStar(__webpack_require__(132), exports); +__exportStar(__webpack_require__(425), exports); __exportStar(__webpack_require__(845), exports); var context_1 = __webpack_require__(77); /** Entrypoint for context API */ @@ -44558,65 +47842,7 @@ module.exports = validRange /***/ }), /* 481 */, /* 482 */, -/* 483 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.deleteBaggage = exports.setBaggage = exports.getBaggage = void 0; -var context_1 = __webpack_require__(132); -/** - * Baggage key - */ -var BAGGAGE_KEY = context_1.createContextKey('OpenTelemetry Baggage Key'); -/** - * Retrieve the current baggage from the given context - * - * @param {Context} Context that manage all context values - * @returns {Baggage} Extracted baggage from the context - */ -function getBaggage(context) { - return context.getValue(BAGGAGE_KEY) || undefined; -} -exports.getBaggage = getBaggage; -/** - * Store a baggage in the given context - * - * @param {Context} Context that manage all context values - * @param {Baggage} baggage that will be set in the actual context - */ -function setBaggage(context, baggage) { - return context.setValue(BAGGAGE_KEY, baggage); -} -exports.setBaggage = setBaggage; -/** - * Delete the baggage stored in the given context - * - * @param {Context} Context that manage all context values - */ -function deleteBaggage(context) { - return context.deleteValue(BAGGAGE_KEY); -} -exports.deleteBaggage = deleteBaggage; -//# sourceMappingURL=context-helpers.js.map - -/***/ }), +/* 483 */, /* 484 */, /* 485 */, /* 486 */ @@ -45256,33 +48482,33 @@ module.exports = {"application/1d-interleaved-parityfec":{"source":"iana"},"appl */ Object.defineProperty(exports, "__esModule", { value: true }); exports.unregisterGlobal = exports.getGlobal = exports.registerGlobal = void 0; +var __1 = __webpack_require__(440); var platform_1 = __webpack_require__(910); var version_1 = __webpack_require__(133); var semver_1 = __webpack_require__(987); var major = version_1.VERSION.split('.')[0]; -var GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for("opentelemetry.js.api." + major); +var GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for("io.opentelemetry.js.api." + major); var _global = platform_1._globalThis; -function registerGlobal(type, instance, diag, allowOverride) { +function registerGlobal(type, instance, allowOverride) { var _a; if (allowOverride === void 0) { allowOverride = false; } - var api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : { + _global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? 
_a : { version: version_1.VERSION, - }); + }; + var api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; if (!allowOverride && api[type]) { // already registered an API of this type var err = new Error("@opentelemetry/api: Attempted duplicate registration of API: " + type); - diag.error(err.stack || err.message); - return false; + __1.diag.error(err.stack || err.message); + return; } if (api.version !== version_1.VERSION) { // All registered APIs must be of the same version exactly var err = new Error('@opentelemetry/api: All API registration versions must match'); - diag.error(err.stack || err.message); - return false; + __1.diag.error(err.stack || err.message); + return; } api[type] = instance; - diag.debug("@opentelemetry/api: Registered a global for " + type + " v" + version_1.VERSION + "."); - return true; } exports.registerGlobal = registerGlobal; function getGlobal(type) { @@ -45294,8 +48520,7 @@ function getGlobal(type) { return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? void 0 : _b[type]; } exports.getGlobal = getGlobal; -function unregisterGlobal(type, diag) { - diag.debug("@opentelemetry/api: Unregistering a global for " + type + " v" + version_1.VERSION + "."); +function unregisterGlobal(type) { var api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; if (api) { delete api[type]; @@ -49171,7 +52396,76 @@ exports.partialMatch = partialMatch; /***/ }), /* 598 */, -/* 599 */, +/* 599 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BaggageImpl = void 0; +var BaggageImpl = /** @class */ (function () { + function BaggageImpl(entries) { + this._entries = entries ? 
new Map(entries) : new Map(); + } + BaggageImpl.prototype.getEntry = function (key) { + var entry = this._entries.get(key); + if (!entry) { + return undefined; + } + return Object.assign({}, entry); + }; + BaggageImpl.prototype.getAllEntries = function () { + return Array.from(this._entries.entries()).map(function (_a) { + var k = _a[0], v = _a[1]; + return [k, v]; + }); + }; + BaggageImpl.prototype.setEntry = function (key, entry) { + var newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.set(key, entry); + return newBaggage; + }; + BaggageImpl.prototype.removeEntry = function (key) { + var newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.delete(key); + return newBaggage; + }; + BaggageImpl.prototype.removeEntries = function () { + var keys = []; + for (var _i = 0; _i < arguments.length; _i++) { + keys[_i] = arguments[_i]; + } + var newBaggage = new BaggageImpl(this._entries); + for (var _a = 0, keys_1 = keys; _a < keys_1.length; _a++) { + var key = keys_1[_a]; + newBaggage._entries.delete(key); + } + return newBaggage; + }; + BaggageImpl.prototype.clear = function () { + return new BaggageImpl(); + }; + return BaggageImpl; +}()); +exports.BaggageImpl = BaggageImpl; +//# sourceMappingURL=baggage.js.map + +/***/ }), /* 600 */, /* 601 */ /***/ (function(__unusedmodule, exports, __webpack_require__) { @@ -50117,23 +53411,7 @@ module.exports = require("path"); "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.wrapSpanContext = exports.isSpanContextValid = exports.isValidSpanId = exports.isValidTraceId = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -var NonRecordingSpan_1 = __webpack_require__(437); +exports.isSpanContextValid = exports.isValidSpanId = exports.isValidTraceId = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; var trace_flags_1 = __webpack_require__(975); var VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; var VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; @@ -50160,16 +53438,6 @@ function isSpanContextValid(spanContext) { return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); } exports.isSpanContextValid = isSpanContextValid; -/** - * Wrap the given {@link SpanContext} in a new non-recording {@link Span} - * - * @param spanContext span context to be wrapped - * @returns a new non-recording {@link Span} with the provided context - */ -function wrapSpanContext(spanContext) { - return new NonRecordingSpan_1.NonRecordingSpan(spanContext); -} -exports.wrapSpanContext = wrapSpanContext; //# sourceMappingURL=spancontext-utils.js.map /***/ }), @@ -50495,7 +53763,7 @@ var __createBinding; ar[i] = from[i]; } } - return to.concat(ar || Array.prototype.slice.call(from)); + return to.concat(ar || from); }; __await = function (v) { @@ -52167,7 +55435,30 @@ var __createBinding; /***/ }), -/* 646 */, +/* 646 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Entry.js.map + +/***/ }), /* 647 */, /* 648 */, /* 649 */, @@ -52573,76 +55864,7 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; /* 663 */, /* 664 */, /* 665 */, -/* 666 */ -/***/ (function(__unusedmodule, exports) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.BaggageImpl = void 0; -var BaggageImpl = /** @class */ (function () { - function BaggageImpl(entries) { - this._entries = entries ? 
new Map(entries) : new Map(); - } - BaggageImpl.prototype.getEntry = function (key) { - var entry = this._entries.get(key); - if (!entry) { - return undefined; - } - return Object.assign({}, entry); - }; - BaggageImpl.prototype.getAllEntries = function () { - return Array.from(this._entries.entries()).map(function (_a) { - var k = _a[0], v = _a[1]; - return [k, v]; - }); - }; - BaggageImpl.prototype.setEntry = function (key, entry) { - var newBaggage = new BaggageImpl(this._entries); - newBaggage._entries.set(key, entry); - return newBaggage; - }; - BaggageImpl.prototype.removeEntry = function (key) { - var newBaggage = new BaggageImpl(this._entries); - newBaggage._entries.delete(key); - return newBaggage; - }; - BaggageImpl.prototype.removeEntries = function () { - var keys = []; - for (var _i = 0; _i < arguments.length; _i++) { - keys[_i] = arguments[_i]; - } - var newBaggage = new BaggageImpl(this._entries); - for (var _a = 0, keys_1 = keys; _a < keys_1.length; _a++) { - var key = keys_1[_a]; - newBaggage._entries.delete(key); - } - return newBaggage; - }; - BaggageImpl.prototype.clear = function () { - return new BaggageImpl(); - }; - return BaggageImpl; -}()); -exports.BaggageImpl = BaggageImpl; -//# sourceMappingURL=baggage-impl.js.map - -/***/ }), +/* 666 */, /* 667 */, /* 668 */, /* 669 */ @@ -53687,86 +56909,7 @@ module.exports = valid /* 717 */, /* 718 */, /* 719 */, -/* 720 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getSpanContext = exports.setSpanContext = exports.deleteSpan = exports.setSpan = exports.getSpan = void 0; -var context_1 = __webpack_require__(132); -var NonRecordingSpan_1 = __webpack_require__(437); -/** - * span key - */ -var SPAN_KEY = context_1.createContextKey('OpenTelemetry Context Key SPAN'); -/** - * Return the span if one exists - * - * @param context context to get span from - */ -function getSpan(context) { - return context.getValue(SPAN_KEY) || undefined; -} -exports.getSpan = getSpan; -/** - * Set the span on a context - * - * @param context context to use as parent - * @param span span to set active - */ -function setSpan(context, span) { - return context.setValue(SPAN_KEY, span); -} -exports.setSpan = setSpan; -/** - * Remove current span stored in the context - * - * @param context context to delete span from - */ -function deleteSpan(context) { - return context.deleteValue(SPAN_KEY); -} -exports.deleteSpan = deleteSpan; -/** - * Wrap span context in a NoopSpan and set as span in a new - * context - * - * @param context context to set active span on - * @param spanContext span context to be wrapped - */ -function setSpanContext(context, spanContext) { - return setSpan(context, new NonRecordingSpan_1.NonRecordingSpan(spanContext)); -} -exports.setSpanContext = setSpanContext; -/** - * Get the span context of the span if it exists. 
- * - * @param context context to get values from - */ -function getSpanContext(context) { - var _a; - return (_a = getSpan(context)) === null || _a === void 0 ? void 0 : _a.spanContext(); -} -exports.getSpanContext = getSpanContext; -//# sourceMappingURL=context-utils.js.map - -/***/ }), +/* 720 */, /* 721 */, /* 722 */ /***/ (function(module) { @@ -53823,9 +56966,343 @@ module.exports = bytesToUuid; /***/ }), -/* 725 */, +/* 725 */ +/***/ (function(module) { + +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || from); + }; + + __await = function (v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); +}); + + +/***/ }), /* 726 */, -/* 727 */, +/* 727 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Event.js.map + +/***/ }), /* 728 */ /***/ (function(__unusedmodule, exports) { @@ -54739,7 +58216,77 @@ module.exports = function(dst, src) { /***/ }), -/* 767 */, +/* 767 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopSpan = void 0; +var spancontext_utils_1 = __webpack_require__(629); +/** + * The NoopSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. + */ +var NoopSpan = /** @class */ (function () { + function NoopSpan(_spanContext) { + if (_spanContext === void 0) { _spanContext = spancontext_utils_1.INVALID_SPAN_CONTEXT; } + this._spanContext = _spanContext; + } + // Returns a SpanContext. 
+ NoopSpan.prototype.context = function () { + return this._spanContext; + }; + // By default does nothing + NoopSpan.prototype.setAttribute = function (_key, _value) { + return this; + }; + // By default does nothing + NoopSpan.prototype.setAttributes = function (_attributes) { + return this; + }; + // By default does nothing + NoopSpan.prototype.addEvent = function (_name, _attributes) { + return this; + }; + // By default does nothing + NoopSpan.prototype.setStatus = function (_status) { + return this; + }; + // By default does nothing + NoopSpan.prototype.updateName = function (_name) { + return this; + }; + // By default does nothing + NoopSpan.prototype.end = function (_endTime) { }; + // isRecording always returns false for noopSpan. + NoopSpan.prototype.isRecording = function () { + return false; + }; + // By default does nothing + NoopSpan.prototype.recordException = function (_exception, _time) { }; + return NoopSpan; +}()); +exports.NoopSpan = NoopSpan; +//# sourceMappingURL=NoopSpan.js.map + +/***/ }), /* 768 */ /***/ (function(module, __unusedexports, __webpack_require__) { @@ -56117,7 +59664,74 @@ FormData.prototype.toString = function () { /***/ }), -/* 792 */, +/* 792 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.baggageEntryMetadataFromString = exports.createBaggage = void 0; +var baggage_1 = __webpack_require__(599); +var symbol_1 = __webpack_require__(561); +__exportStar(__webpack_require__(938), exports); +__exportStar(__webpack_require__(646), exports); +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +function createBaggage(entries) { + if (entries === void 0) { entries = {}; } + return new baggage_1.BaggageImpl(new Map(Object.entries(entries))); +} +exports.createBaggage = createBaggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. 
+ * + */ +function baggageEntryMetadataFromString(str) { + if (typeof str !== 'string') { + // @TODO log diagnostic + str = ''; + } + return { + __TYPE__: symbol_1.baggageEntryMetadataSymbol, + toString: function () { + return str; + }, + }; +} +exports.baggageEntryMetadataFromString = baggageEntryMetadataFromString; +//# sourceMappingURL=index.js.map + +/***/ }), /* 793 */, /* 794 */ /***/ (function(module) { @@ -56761,7 +60375,7 @@ var __createBinding; ar[i] = from[i]; } } - return to.concat(ar || Array.prototype.slice.call(from)); + return to.concat(ar || from); }; __await = function (v) { @@ -56991,7 +60605,30 @@ module.exports = require("url"); /* 836 */, /* 837 */, /* 838 */, -/* 839 */, +/* 839 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=TimedEvent.js.map + +/***/ }), /* 840 */, /* 841 */, /* 842 */, @@ -57026,7 +60663,30 @@ Object.defineProperty(exports, "__esModule", { value: true }); /* 848 */, /* 849 */, /* 850 */, -/* 851 */, +/* 851 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=link_context.js.map + +/***/ }), /* 852 */ /***/ (function(module, __unusedexports, __webpack_require__) { @@ -57555,7 +61215,7 @@ var __createBinding; ar[i] = from[i]; } } - return to.concat(ar || Array.prototype.slice.call(from)); + return to.concat(ar || from); }; __await = function (v) { @@ -57701,11 +61361,9 @@ module.exports = compare */ Object.defineProperty(exports, "__esModule", { value: true }); exports.TraceAPI = void 0; -var global_utils_1 = __webpack_require__(525); var ProxyTracerProvider_1 = __webpack_require__(394); var spancontext_utils_1 = __webpack_require__(629); -var context_utils_1 = __webpack_require__(720); -var diag_1 = __webpack_require__(118); +var global_utils_1 = __webpack_require__(525); var API_NAME = 'trace'; /** * Singleton object which represents the entry point to the OpenTelemetry Tracing API @@ -57714,13 +61372,7 @@ var TraceAPI = /** @class */ (function () { /** Empty private constructor prevents end users from constructing a new instance of the API */ function TraceAPI() { this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); - this.wrapSpanContext = spancontext_utils_1.wrapSpanContext; this.isSpanContextValid = spancontext_utils_1.isSpanContextValid; - this.deleteSpan = context_utils_1.deleteSpan; - this.getSpan = context_utils_1.getSpan; - this.getSpanContext = context_utils_1.getSpanContext; - this.setSpan = context_utils_1.setSpan; - this.setSpanContext = context_utils_1.setSpanContext; } /** Get the singleton instance of the Trace API */ TraceAPI.getInstance = function () { @@ -57730,16 +61382,12 @@ var TraceAPI = /** @class */ (function () { return this._instance; }; /** - * Set the current global tracer. - * - * @returns true if the tracer provider was successfully registered, else false + * Set the current global tracer. Returns the initialized global tracer provider */ TraceAPI.prototype.setGlobalTracerProvider = function (provider) { - var success = global_utils_1.registerGlobal(API_NAME, this._proxyTracerProvider, diag_1.DiagAPI.instance()); - if (success) { - this._proxyTracerProvider.setDelegate(provider); - } - return success; + this._proxyTracerProvider.setDelegate(provider); + global_utils_1.registerGlobal(API_NAME, this._proxyTracerProvider); + return this._proxyTracerProvider; }; /** * Returns the global tracer provider. @@ -57755,7 +61403,7 @@ var TraceAPI = /** @class */ (function () { }; /** Remove the global tracer provider */ TraceAPI.prototype.disable = function () { - global_utils_1.unregisterGlobal(API_NAME, diag_1.DiagAPI.instance()); + global_utils_1.unregisterGlobal(API_NAME); this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); }; return TraceAPI; @@ -57870,30 +61518,7 @@ module.exports = (versions, range, options) => { /***/ }), /* 878 */, /* 879 */, -/* 880 */ -/***/ (function(__unusedmodule, exports) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=types.js.map - -/***/ }), +/* 880 */, /* 881 */ /***/ (function(__unusedmodule, exports) { @@ -60014,36 +63639,48 @@ exports.deleteKey = deleteKey; /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; +/*! + * Copyright (c) Microsoft and contributors. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for + * license information. + * + * Azure Core LRO SDK for JavaScript - 1.0.5 + */ Object.defineProperty(exports, '__esModule', { value: true }); -var logger$1 = __webpack_require__(492); +var tslib = __webpack_require__(725); // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** * When a poller is manually stopped through the `stopPolling` method, * the poller will be rejected with an instance of the PollerStoppedError. */ -class PollerStoppedError extends Error { - constructor(message) { - super(message); - this.name = "PollerStoppedError"; - Object.setPrototypeOf(this, PollerStoppedError.prototype); +var PollerStoppedError = /** @class */ (function (_super) { + tslib.__extends(PollerStoppedError, _super); + function PollerStoppedError(message) { + var _this = _super.call(this, message) || this; + _this.name = "PollerStoppedError"; + Object.setPrototypeOf(_this, PollerStoppedError.prototype); + return _this; } -} + return PollerStoppedError; +}(Error)); /** * When a poller is cancelled through the `cancelOperation` method, * the poller will be rejected with an instance of the PollerCancelledError. */ -class PollerCancelledError extends Error { - constructor(message) { - super(message); - this.name = "PollerCancelledError"; - Object.setPrototypeOf(this, PollerCancelledError.prototype); +var PollerCancelledError = /** @class */ (function (_super) { + tslib.__extends(PollerCancelledError, _super); + function PollerCancelledError(message) { + var _this = _super.call(this, message) || this; + _this.name = "PollerCancelledError"; + Object.setPrototypeOf(_this, PollerCancelledError.prototype); + return _this; } -} + return PollerCancelledError; +}(Error)); /** * A class that represents the definition of a program that polls through consecutive requests * until it reaches a state of completion. @@ -60106,7 +63743,7 @@ class PollerCancelledError extends Error { * */ // eslint-disable-next-line no-use-before-define -class Poller { +var Poller = /** @class */ (function () { /** * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. * @@ -60172,18 +63809,19 @@ class Poller { * * @param operation - Must contain the basic properties of `PollOperation`. */ - constructor(operation) { + function Poller(operation) { + var _this = this; this.stopped = true; this.pollProgressCallbacks = []; this.operation = operation; - this.promise = new Promise((resolve, reject) => { - this.resolve = resolve; - this.reject = reject; + this.promise = new Promise(function (resolve, reject) { + _this.resolve = resolve; + _this.reject = reject; }); // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown. // The above warning would get thrown if `poller.poll` is called, it returns an error, // and pullUntilDone did not have a .catch or await try/catch on it's return value. 
- this.promise.catch(() => { + this.promise.catch(function () { /* intentionally blank */ }); } @@ -60192,15 +63830,29 @@ class Poller { * Starts a loop that will break only if the poller is done * or if the poller is stopped. */ - async startPolling() { - if (this.stopped) { - this.stopped = false; - } - while (!this.isStopped() && !this.isDone()) { - await this.poll(); - await this.delay(); - } - } + Poller.prototype.startPolling = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (this.stopped) { + this.stopped = false; + } + _a.label = 1; + case 1: + if (!(!this.isStopped() && !this.isDone())) return [3 /*break*/, 4]; + return [4 /*yield*/, this.poll()]; + case 2: + _a.sent(); + return [4 /*yield*/, this.delay()]; + case 3: + _a.sent(); + return [3 /*break*/, 1]; + case 4: return [2 /*return*/]; + } + }); + }); + }; /** * @internal * pollOnce does one polling, by calling to the update method of the underlying @@ -60210,31 +63862,44 @@ class Poller { * * @param options - Optional properties passed to the operation's update method. */ - async pollOnce(options = {}) { - try { - if (!this.isDone()) { - this.operation = await this.operation.update({ - abortSignal: options.abortSignal, - fireProgress: this.fireProgress.bind(this) - }); - if (this.isDone() && this.resolve) { - // If the poller has finished polling, this means we now have a result. - // However, it can be the case that TResult is instantiated to void, so - // we are not expecting a result anyway. To assert that we might not - // have a result eventually after finishing polling, we cast the result - // to TResult. - this.resolve(this.operation.state.result); + Poller.prototype.pollOnce = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, e_1; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _b.trys.push([0, 3, , 4]); + if (!!this.isDone()) return [3 /*break*/, 2]; + _a = this; + return [4 /*yield*/, this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this) + })]; + case 1: + _a.operation = _b.sent(); + if (this.isDone() && this.resolve) { + // If the poller has finished polling, this means we now have a result. + // However, it can be the case that TResult is instantiated to void, so + // we are not expecting a result anyway. To assert that we might not + // have a result eventually after finishing polling, we cast the result + // to TResult. + this.resolve(this.operation.state.result); + } + _b.label = 2; + case 2: return [3 /*break*/, 4]; + case 3: + e_1 = _b.sent(); + this.operation.state.error = e_1; + if (this.reject) { + this.reject(e_1); + } + throw e_1; + case 4: return [2 /*return*/]; } - } - } - catch (e) { - this.operation.state.error = e; - if (this.reject) { - this.reject(e); - } - throw e; - } - } + }); + }); + }; /** * @internal * fireProgress calls the functions passed in via onProgress the method of the poller. @@ -60244,22 +63909,36 @@ class Poller { * * @param state - The current operation state. 
*/ - fireProgress(state) { - for (const callback of this.pollProgressCallbacks) { + Poller.prototype.fireProgress = function (state) { + for (var _i = 0, _a = this.pollProgressCallbacks; _i < _a.length; _i++) { + var callback = _a[_i]; callback(state); } - } + }; /** * @internal * Invokes the underlying operation's cancel method, and rejects the * pollUntilDone promise. */ - async cancelOnce(options = {}) { - this.operation = await this.operation.cancel(options); - if (this.reject) { - this.reject(new PollerCancelledError("Poller cancelled")); - } - } + Poller.prototype.cancelOnce = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = this; + return [4 /*yield*/, this.operation.cancel(options)]; + case 1: + _a.operation = _b.sent(); + if (this.reject) { + this.reject(new PollerCancelledError("Poller cancelled")); + } + return [2 /*return*/]; + } + }); + }); + }; /** * Returns a promise that will resolve once a single polling request finishes. * It does this by calling the update method of the Poller's operation. @@ -60268,61 +63947,68 @@ class Poller { * * @param options - Optional properties passed to the operation's update method. */ - poll(options = {}) { + Poller.prototype.poll = function (options) { + var _this = this; + if (options === void 0) { options = {}; } if (!this.pollOncePromise) { this.pollOncePromise = this.pollOnce(options); - const clearPollOncePromise = () => { - this.pollOncePromise = undefined; + var clearPollOncePromise = function () { + _this.pollOncePromise = undefined; }; this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); } return this.pollOncePromise; - } + }; /** * Returns a promise that will resolve once the underlying operation is completed. */ - async pollUntilDone() { - if (this.stopped) { - this.startPolling().catch(this.reject); - } - return this.promise; - } + Poller.prototype.pollUntilDone = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + if (this.stopped) { + this.startPolling().catch(this.reject); + } + return [2 /*return*/, this.promise]; + }); + }); + }; /** * Invokes the provided callback after each polling is completed, * sending the current state of the poller's operation. * * It returns a method that can be used to stop receiving updates on the given callback function. */ - onProgress(callback) { + Poller.prototype.onProgress = function (callback) { + var _this = this; this.pollProgressCallbacks.push(callback); - return () => { - this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + return function () { + _this.pollProgressCallbacks = _this.pollProgressCallbacks.filter(function (c) { return c !== callback; }); }; - } + }; /** * Returns true if the poller has finished polling. */ - isDone() { - const state = this.operation.state; + Poller.prototype.isDone = function () { + var state = this.operation.state; return Boolean(state.isCompleted || state.isCancelled || state.error); - } + }; /** * Stops the poller from continuing to poll. */ - stopPolling() { + Poller.prototype.stopPolling = function () { if (!this.stopped) { this.stopped = true; if (this.reject) { this.reject(new PollerStoppedError("This poller is already stopped")); } } - } + }; /** * Returns true if the poller is stopped. 
*/ - isStopped() { + Poller.prototype.isStopped = function () { return this.stopped; - } + }; /** * Attempts to cancel the underlying operation. * @@ -60332,7 +64018,8 @@ class Poller { * * @param options - Optional properties passed to the operation's update method. */ - cancelOperation(options = {}) { + Poller.prototype.cancelOperation = function (options) { + if (options === void 0) { options = {}; } if (!this.stopped) { this.stopped = true; } @@ -60343,7 +64030,7 @@ class Poller { throw new Error("A cancel request is currently pending"); } return this.cancelPromise; - } + }; /** * Returns the state of the operation. * @@ -60392,379 +64079,29 @@ class Poller { * `../test/utils/testPoller.ts` * and look for the getOperationState implementation. */ - getOperationState() { + Poller.prototype.getOperationState = function () { return this.operation.state; - } + }; /** * Returns the result value of the operation, * regardless of the state of the poller. * It can return undefined or an incomplete form of the final TResult value * depending on the implementation. */ - getResult() { - const state = this.operation.state; + Poller.prototype.getResult = function () { + var state = this.operation.state; return state.result; - } + }; /** * Returns a serialized version of the poller's operation * by invoking the operation's toString method. */ - toString() { + Poller.prototype.toString = function () { return this.operation.toString(); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * The `@azure/logger` configuration for this package. - * @internal - */ -const logger = logger$1.createClientLogger("core-lro"); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Detects where the continuation token is and returns it. Notice that azure-asyncoperation - * must be checked first before the other location headers because there are scenarios - * where both azure-asyncoperation and location could be present in the same response but - * azure-asyncoperation should be the one to use for polling. - */ -function getPollingUrl(rawResponse, defaultPath) { - var _a, _b, _c; - return ((_c = (_b = (_a = getAzureAsyncOperation(rawResponse)) !== null && _a !== void 0 ? _a : getLocation(rawResponse)) !== null && _b !== void 0 ? _b : getOperationLocation(rawResponse)) !== null && _c !== void 0 ? _c : defaultPath); -} -function getLocation(rawResponse) { - return rawResponse.headers["location"]; -} -function getOperationLocation(rawResponse) { - return rawResponse.headers["operation-location"]; -} -function getAzureAsyncOperation(rawResponse) { - return rawResponse.headers["azure-asyncoperation"]; -} -function inferLroMode(requestPath, requestMethod, rawResponse) { - if (getAzureAsyncOperation(rawResponse) !== undefined) { - return { - mode: "AzureAsync", - resourceLocation: requestMethod === "PUT" - ? requestPath - : requestMethod === "POST" - ? 
getLocation(rawResponse) - : undefined - }; - } - else if (getLocation(rawResponse) !== undefined || - getOperationLocation(rawResponse) !== undefined) { - return { - mode: "Location" - }; - } - else if (["PUT", "PATCH"].includes(requestMethod)) { - return { - mode: "Body" - }; - } - return {}; -} -class SimpleRestError extends Error { - constructor(message, statusCode) { - super(message); - this.name = "RestError"; - this.statusCode = statusCode; - Object.setPrototypeOf(this, SimpleRestError.prototype); - } -} -function isUnexpectedInitialResponse(rawResponse) { - const code = rawResponse.statusCode; - if (![203, 204, 202, 201, 200, 500].includes(code)) { - throw new SimpleRestError(`Received unexpected HTTP status code ${code} in the initial response. This may indicate a server issue.`, code); - } - return false; -} -function isUnexpectedPollingResponse(rawResponse) { - const code = rawResponse.statusCode; - if (![202, 201, 200, 500].includes(code)) { - throw new SimpleRestError(`Received unexpected HTTP status code ${code} while polling. This may indicate a server issue.`, code); - } - return false; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const successStates = ["succeeded"]; -const failureStates = ["failed", "canceled", "cancelled"]; - -// Copyright (c) Microsoft Corporation. -function getResponseStatus(rawResponse) { - var _a, _b; - const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - return (_b = status === null || status === void 0 ? void 0 : status.toLowerCase()) !== null && _b !== void 0 ? _b : "succeeded"; -} -function isAzureAsyncPollingDone(rawResponse) { - const state = getResponseStatus(rawResponse); - if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { - throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); - } - return successStates.includes(state); -} -/** - * Sends a request to the URI of the provisioned resource if needed. - */ -async function sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig) { - switch (lroResourceLocationConfig) { - case "original-uri": - return lro.sendPollRequest(lro.requestPath); - case "azure-async-operation": - return undefined; - case "location": - default: - return lro.sendPollRequest(resourceLocation !== null && resourceLocation !== void 0 ? resourceLocation : lro.requestPath); - } -} -function processAzureAsyncOperationResult(lro, resourceLocation, lroResourceLocationConfig) { - return (response) => { - if (isAzureAsyncPollingDone(response.rawResponse)) { - if (resourceLocation === undefined) { - return Object.assign(Object.assign({}, response), { done: true }); - } - else { - return Object.assign(Object.assign({}, response), { done: false, next: async () => { - const finalResponse = await sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig); - return Object.assign(Object.assign({}, (finalResponse !== null && finalResponse !== void 0 ? finalResponse : response)), { done: true }); - } }); - } - } - return Object.assign(Object.assign({}, response), { done: false }); }; -} - -// Copyright (c) Microsoft Corporation. -function getProvisioningState(rawResponse) { - var _a, _b, _c; - const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - const state = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? 
_b : provisioningState; - return (_c = state === null || state === void 0 ? void 0 : state.toLowerCase()) !== null && _c !== void 0 ? _c : "succeeded"; -} -function isBodyPollingDone(rawResponse) { - const state = getProvisioningState(rawResponse); - if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { - throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); - } - return successStates.includes(state); -} -/** - * Creates a polling strategy based on BodyPolling which uses the provisioning state - * from the result to determine the current operation state - */ -function processBodyPollingOperationResult(response) { - return Object.assign(Object.assign({}, response), { done: isBodyPollingDone(response.rawResponse) }); -} - -// Copyright (c) Microsoft Corporation. -function isLocationPollingDone(rawResponse) { - return !isUnexpectedPollingResponse(rawResponse) && rawResponse.statusCode !== 202; -} -function processLocationPollingOperationResult(response) { - return Object.assign(Object.assign({}, response), { done: isLocationPollingDone(response.rawResponse) }); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -function processPassthroughOperationResult(response) { - return Object.assign(Object.assign({}, response), { done: true }); -} - -// Copyright (c) Microsoft Corporation. -/** - * creates a stepping function that maps an LRO state to another. - */ -function createGetLroStatusFromResponse(lroPrimitives, config, lroResourceLocationConfig) { - switch (config.mode) { - case "AzureAsync": { - return processAzureAsyncOperationResult(lroPrimitives, config.resourceLocation, lroResourceLocationConfig); - } - case "Location": { - return processLocationPollingOperationResult; - } - case "Body": { - return processBodyPollingOperationResult; - } - default: { - return processPassthroughOperationResult; - } - } -} -/** - * Creates a polling operation. - */ -function createPoll(lroPrimitives) { - return async (path, pollerConfig, getLroStatusFromResponse) => { - const response = await lroPrimitives.sendPollRequest(path); - const retryAfter = response.rawResponse.headers["retry-after"]; - if (retryAfter !== undefined) { - const retryAfterInMs = parseInt(retryAfter); - pollerConfig.intervalInMs = isNaN(retryAfterInMs) - ? calculatePollingIntervalFromDate(new Date(retryAfter), pollerConfig.intervalInMs) - : retryAfterInMs; - } - return getLroStatusFromResponse(response); - }; -} -function calculatePollingIntervalFromDate(retryAfterDate, defaultIntervalInMs) { - const timeNow = Math.floor(new Date().getTime()); - const retryAfterTime = retryAfterDate.getTime(); - if (timeNow < retryAfterTime) { - return retryAfterTime - timeNow; - } - return defaultIntervalInMs; -} -/** - * Creates a callback to be used to initialize the polling operation state. 
- * @param state - of the polling operation - * @param operationSpec - of the LRO - * @param callback - callback to be called when the operation is done - * @returns callback that initializes the state of the polling operation - */ -function createInitializeState(state, requestPath, requestMethod) { - return (response) => { - if (isUnexpectedInitialResponse(response.rawResponse)) - ; - state.initialRawResponse = response.rawResponse; - state.isStarted = true; - state.pollingURL = getPollingUrl(state.initialRawResponse, requestPath); - state.config = inferLroMode(requestPath, requestMethod, state.initialRawResponse); - /** short circuit polling if body polling is done in the initial request */ - if (state.config.mode === undefined || - (state.config.mode === "Body" && isBodyPollingDone(state.initialRawResponse))) { - state.result = response.flatResponse; - state.isCompleted = true; - } - logger.verbose(`LRO: initial state: ${JSON.stringify(state)}`); - return Boolean(state.isCompleted); - }; -} - -// Copyright (c) Microsoft Corporation. -class GenericPollOperation { - constructor(state, lro, lroResourceLocationConfig, processResult, updateState, isDone) { - this.state = state; - this.lro = lro; - this.lroResourceLocationConfig = lroResourceLocationConfig; - this.processResult = processResult; - this.updateState = updateState; - this.isDone = isDone; - } - setPollerConfig(pollerConfig) { - this.pollerConfig = pollerConfig; - } - /** - * General update function for LROPoller, the general process is as follows - * 1. Check initial operation result to determine the strategy to use - * - Strategies: Location, Azure-AsyncOperation, Original Uri - * 2. Check if the operation result has a terminal state - * - Terminal state will be determined by each strategy - * 2.1 If it is terminal state Check if a final GET request is required, if so - * send final GET request and return result from operation. If no final GET - * is required, just return the result from operation. - * - Determining what to call for final request is responsibility of each strategy - * 2.2 If it is not terminal state, call the polling operation and go to step 1 - * - Determining what to call for polling is responsibility of each strategy - * - Strategies will always use the latest URI for polling if provided otherwise - * the last known one - */ - async update(options) { - var _a, _b, _c; - const state = this.state; - let lastResponse = undefined; - if (!state.isStarted) { - const initializeState = createInitializeState(state, this.lro.requestPath, this.lro.requestMethod); - lastResponse = await this.lro.sendInitialRequest(); - initializeState(lastResponse); - } - if (!state.isCompleted) { - if (!this.poll || !this.getLroStatusFromResponse) { - if (!state.config) { - throw new Error("Bad state: LRO mode is undefined. Please check if the serialized state is well-formed."); - } - const isDone = this.isDone; - this.getLroStatusFromResponse = isDone - ? (response) => (Object.assign(Object.assign({}, response), { done: isDone(response.flatResponse, this.state) })) - : createGetLroStatusFromResponse(this.lro, state.config, this.lroResourceLocationConfig); - this.poll = createPoll(this.lro); - } - if (!state.pollingURL) { - throw new Error("Bad state: polling URL is undefined. 
Please check if the serialized state is well-formed."); - } - const currentState = await this.poll(state.pollingURL, this.pollerConfig, this.getLroStatusFromResponse); - logger.verbose(`LRO: polling response: ${JSON.stringify(currentState.rawResponse)}`); - if (currentState.done) { - state.result = this.processResult - ? this.processResult(currentState.flatResponse, state) - : currentState.flatResponse; - state.isCompleted = true; - } - else { - this.poll = (_a = currentState.next) !== null && _a !== void 0 ? _a : this.poll; - state.pollingURL = getPollingUrl(currentState.rawResponse, state.pollingURL); - } - lastResponse = currentState; - } - logger.verbose(`LRO: current state: ${JSON.stringify(state)}`); - if (lastResponse) { - (_b = this.updateState) === null || _b === void 0 ? void 0 : _b.call(this, state, lastResponse === null || lastResponse === void 0 ? void 0 : lastResponse.rawResponse); - } - else { - logger.error(`LRO: no response was received`); - } - (_c = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _c === void 0 ? void 0 : _c.call(options, state); - return this; - } - async cancel() { - this.state.isCancelled = true; - return this; - } - /** - * Serializes the Poller operation. - */ - toString() { - return JSON.stringify({ - state: this.state - }); - } -} - -// Copyright (c) Microsoft Corporation. -function deserializeState(serializedState) { - try { - return JSON.parse(serializedState).state; - } - catch (e) { - throw new Error(`LroEngine: Unable to deserialize state: ${serializedState}`); - } -} -/** - * The LRO Engine, a class that performs polling. - */ -class LroEngine extends Poller { - constructor(lro, options) { - const { intervalInMs = 2000, resumeFrom } = options || {}; - const state = resumeFrom - ? deserializeState(resumeFrom) - : {}; - const operation = new GenericPollOperation(state, lro, options === null || options === void 0 ? void 0 : options.lroResourceLocationConfig, options === null || options === void 0 ? void 0 : options.processResult, options === null || options === void 0 ? void 0 : options.updateState, options === null || options === void 0 ? void 0 : options.isDone); - super(operation); - this.config = { intervalInMs: intervalInMs }; - operation.setPollerConfig(this.config); - } - /** - * The method used by the poller to wait before attempting to update its operation. - */ - delay() { - return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); - } -} + return Poller; +}()); -exports.LroEngine = LroEngine; exports.Poller = Poller; exports.PollerCancelledError = PollerCancelledError; exports.PollerStoppedError = PollerStoppedError; @@ -61252,8 +64589,7 @@ function restore(id) { core.debug(`primary key is ${primaryKey}`); core.saveState(STATE_CACHE_PRIMARY_KEY, primaryKey); if (primaryKey.endsWith('-')) { - core.warning(`No file in ${process.cwd()} matched to [${packageManager.pattern}], make sure you have checked out the target repository`); - return; + throw new Error(`No file in ${process.cwd()} matched to [${packageManager.pattern}], make sure you have checked out the target repository`); } const matchedKey = yield cache.restoreCache(packageManager.path, primaryKey, [ `${CACHE_KEY_PREFIX}-${process.env['RUNNER_OS']}-${id}` @@ -61346,7 +64682,7 @@ function isProbablyGradleDaemonProblem(packageManager, error) { * limitations under the License. 
*/ Object.defineProperty(exports, "__esModule", { value: true }); -exports.NoopTextMapPropagator = void 0; +exports.NOOP_TEXT_MAP_PROPAGATOR = exports.NoopTextMapPropagator = void 0; /** * No-op implementations of {@link TextMapPropagator}. */ @@ -61365,6 +64701,7 @@ var NoopTextMapPropagator = /** @class */ (function () { return NoopTextMapPropagator; }()); exports.NoopTextMapPropagator = NoopTextMapPropagator; +exports.NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator(); //# sourceMappingURL=NoopTextMapPropagator.js.map /***/ }), @@ -61778,7 +65115,30 @@ var SpanStatusCode; /* 935 */, /* 936 */, /* 937 */, -/* 938 */, +/* 938 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Baggage.js.map + +/***/ }), /* 939 */ /***/ (function(module, __unusedexports, __webpack_require__) { @@ -62822,7 +66182,7 @@ exports.exec = exec; Object.defineProperty(exports, "__esModule", { value: true }); exports.isCompatible = exports._makeCompatibilityCheck = void 0; var version_1 = __webpack_require__(133); -var re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; +var re = /^(\d+)\.(\d+)\.(\d+)(?:-(.*))?$/; /** * Create a function to test an API version to see if it is compatible with the provided ownVersion. 
* @@ -62851,14 +66211,7 @@ function _makeCompatibilityCheck(ownVersion) { major: +myVersionMatch[1], minor: +myVersionMatch[2], patch: +myVersionMatch[3], - prerelease: myVersionMatch[4], }; - // if ownVersion has a prerelease tag, versions must match exactly - if (ownVersionParsed.prerelease != null) { - return function isExactmatch(globalVersion) { - return globalVersion === ownVersion; - }; - } function _reject(v) { rejectedVersions.add(v); return false; @@ -62884,12 +66237,7 @@ function _makeCompatibilityCheck(ownVersion) { major: +globalVersionMatch[1], minor: +globalVersionMatch[2], patch: +globalVersionMatch[3], - prerelease: globalVersionMatch[4], }; - // if globalVersion has a prerelease tag, versions must match exactly - if (globalVersionParsed.prerelease != null) { - return _reject(globalVersion); - } // major versions must match if (ownVersionParsed.major !== globalVersionParsed.major) { return _reject(globalVersion); diff --git a/dist/setup/index.js b/dist/setup/index.js index e09fd469d..d8f0e773e 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -881,24 +881,16 @@ exports.NonDocumentTypeChildNodeImpl = NonDocumentTypeChildNodeImpl; */ Object.defineProperty(exports, "__esModule", { value: true }); exports.PropagationAPI = void 0; -var global_utils_1 = __webpack_require__(525); var NoopTextMapPropagator_1 = __webpack_require__(637); var TextMapPropagator_1 = __webpack_require__(649); -var context_helpers_1 = __webpack_require__(838); -var utils_1 = __webpack_require__(872); -var diag_1 = __webpack_require__(118); +var global_utils_1 = __webpack_require__(525); var API_NAME = 'propagation'; -var NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator_1.NoopTextMapPropagator(); /** * Singleton object which represents the entry point to the OpenTelemetry Propagation API */ var PropagationAPI = /** @class */ (function () { /** Empty private constructor prevents end users from constructing a new instance of the API */ function PropagationAPI() { - this.createBaggage = utils_1.createBaggage; - this.getBaggage = context_helpers_1.getBaggage; - this.setBaggage = context_helpers_1.setBaggage; - this.deleteBaggage = context_helpers_1.deleteBaggage; } /** Get the singleton instance of the Propagator API */ PropagationAPI.getInstance = function () { @@ -908,12 +900,11 @@ var PropagationAPI = /** @class */ (function () { return this._instance; }; /** - * Set the current propagator. - * - * @returns true if the propagator was successfully registered, else false + * Set the current propagator. 
Returns the initialized propagator */ PropagationAPI.prototype.setGlobalPropagator = function (propagator) { - return global_utils_1.registerGlobal(API_NAME, propagator, diag_1.DiagAPI.instance()); + global_utils_1.registerGlobal(API_NAME, propagator); + return propagator; }; /** * Inject context into a carrier to be propagated inter-process @@ -945,10 +936,10 @@ var PropagationAPI = /** @class */ (function () { }; /** Remove the global propagator */ PropagationAPI.prototype.disable = function () { - global_utils_1.unregisterGlobal(API_NAME, diag_1.DiagAPI.instance()); + global_utils_1.unregisterGlobal(API_NAME); }; PropagationAPI.prototype._getGlobalPropagator = function () { - return global_utils_1.getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; + return global_utils_1.getGlobal(API_NAME) || NoopTextMapPropagator_1.NOOP_TEXT_MAP_PROPAGATOR; }; return PropagationAPI; }()); @@ -4231,6 +4222,205 @@ if (typeof Symbol === undefined || !Symbol.asyncIterator) { Object.defineProperty(exports, '__esModule', { value: true }); var api = __webpack_require__(440); +var tslib = __webpack_require__(144); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A no-op implementation of Span that can safely be used without side-effects. + */ +var NoOpSpan = /** @class */ (function () { + function NoOpSpan() { + } + /** + * Returns the SpanContext associated with this Span. + */ + NoOpSpan.prototype.context = function () { + return { + spanId: "", + traceId: "", + traceFlags: 0 /* NONE */ + }; + }; + /** + * Marks the end of Span execution. + * @param _endTime - The time to use as the Span's end time. Defaults to + * the current time. + */ + NoOpSpan.prototype.end = function (_endTime) { + /* Noop */ + }; + /** + * Sets an attribute on the Span + * @param _key - The attribute key + * @param _value - The attribute value + */ + NoOpSpan.prototype.setAttribute = function (_key, _value) { + return this; + }; + /** + * Sets attributes on the Span + * @param _attributes - The attributes to add + */ + NoOpSpan.prototype.setAttributes = function (_attributes) { + return this; + }; + /** + * Adds an event to the Span + * @param _name - The name of the event + * @param _attributes - The associated attributes to add for this event + */ + NoOpSpan.prototype.addEvent = function (_name, _attributes) { + return this; + }; + /** + * Sets a status on the span. Overrides the default of SpanStatusCode.OK. + * @param _status - The status to set. + */ + NoOpSpan.prototype.setStatus = function (_status) { + return this; + }; + /** + * Updates the name of the Span + * @param _name - the new Span name + */ + NoOpSpan.prototype.updateName = function (_name) { + return this; + }; + /** + * Returns whether this span will be recorded + */ + NoOpSpan.prototype.isRecording = function () { + return false; + }; + /** + * Sets exception as a span event + * @param exception - the exception the only accepted values are string or Error + * @param time - the time to set as Span's event time. If not provided, + * use the current time. + */ + NoOpSpan.prototype.recordException = function (_exception, _time) { + /* do nothing */ + }; + return NoOpSpan; +}()); + +// Copyright (c) Microsoft Corporation. +/** + * A no-op implementation of Tracer that can be used when tracing + * is disabled. + */ +var NoOpTracer = /** @class */ (function () { + function NoOpTracer() { + } + /** + * Starts a new Span. + * @param _name - The name of the span. + * @param _options - The SpanOptions used during Span creation. 
+ */ + NoOpTracer.prototype.startSpan = function (_name, _options) { + return new NoOpSpan(); + }; + /** + * Returns the current Span from the current context, if available. + */ + NoOpTracer.prototype.getCurrentSpan = function () { + return new NoOpSpan(); + }; + /** + * Executes the given function within the context provided by a Span. + * @param _span - The span that provides the context. + * @param fn - The function to be executed. + */ + NoOpTracer.prototype.withSpan = function (_span, fn) { + return fn(); + }; + /** + * Bind a Span as the target's scope + * @param target - An object to bind the scope. + * @param _span - A specific Span to use. Otherwise, use the current one. + */ + NoOpTracer.prototype.bind = function (target, _span) { + return target; + }; + return NoOpTracer; +}()); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function getGlobalObject() { + return global; +} + +// Copyright (c) Microsoft Corporation. +// V1 = OpenTelemetry 0.1 +// V2 = OpenTelemetry 0.2 +// V3 = OpenTelemetry 0.6.1 +// V4 = OpenTelemetry 1.0.0-rc.0 +var GLOBAL_TRACER_VERSION = 4; +// preview5 shipped with @azure/core-tracing.tracerCache +// and didn't have smart detection for collisions +var GLOBAL_TRACER_SYMBOL = Symbol.for("@azure/core-tracing.tracerCache3"); +var cache; +function loadTracerCache() { + var globalObj = getGlobalObject(); + var existingCache = globalObj[GLOBAL_TRACER_SYMBOL]; + var setGlobalCache = true; + if (existingCache) { + if (existingCache.version === GLOBAL_TRACER_VERSION) { + cache = existingCache; + } + else { + setGlobalCache = false; + if (existingCache.tracer) { + throw new Error("Two incompatible versions of @azure/core-tracing have been loaded.\n This library is " + GLOBAL_TRACER_VERSION + ", existing is " + existingCache.version + "."); + } + } + } + if (!cache) { + cache = { + tracer: undefined, + version: GLOBAL_TRACER_VERSION + }; + } + if (setGlobalCache) { + globalObj[GLOBAL_TRACER_SYMBOL] = cache; + } +} +function getCache() { + if (!cache) { + loadTracerCache(); + } + return cache; +} + +// Copyright (c) Microsoft Corporation. +var defaultTracer; +function getDefaultTracer() { + if (!defaultTracer) { + defaultTracer = new NoOpTracer(); + } + return defaultTracer; +} +/** + * Sets the global tracer, enabling tracing for the Azure SDK. + * @param tracer - An OpenTelemetry Tracer instance. + */ +function setTracer(tracer) { + var cache = getCache(); + cache.tracer = tracer; +} +/** + * Retrieves the active tracer, or returns a + * no-op implementation if one is not set. + */ +function getTracer() { + var cache = getCache(); + if (!cache.tracer) { + return getDefaultTracer(); + } + return cache.tracer; +} // Copyright (c) Microsoft Corporation. 
(function (SpanKind) { @@ -4265,7 +4455,7 @@ var api = __webpack_require__(440); * @param context - context to get span from */ function getSpan(context) { - return api.trace.getSpan(context); + return api.getSpan(context); } /** * Set the span on a context @@ -4274,7 +4464,7 @@ function getSpan(context) { * @param span - span to set active */ function setSpan(context, span) { - return api.trace.setSpan(context, span); + return api.setSpan(context, span); } /** * Wrap span context in a NoopSpan and set as span in a new @@ -4284,7 +4474,7 @@ function setSpan(context, span) { * @param spanContext - span context to be wrapped */ function setSpanContext(context, spanContext) { - return api.trace.setSpanContext(context, spanContext); + return api.setSpanContext(context, spanContext); } /** * Get the span context of the span if it exists. @@ -4292,24 +4482,10 @@ function setSpanContext(context, spanContext) { * @param context - context to get values from */ function getSpanContext(context) { - return api.trace.getSpanContext(context); -} -/** - * Returns true of the given {@link SpanContext} is valid. - * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. - * - * @param context - the {@link SpanContext} to validate. - * - * @returns true if the {@link SpanContext} is valid, false otherwise. - */ -function isSpanContextValid(context) { - return api.trace.isSpanContextValid(context); -} -function getTracer(name, version) { - return api.trace.getTracer(name || "azure/core-tracing", version); + return api.getSpanContext(context); } /** Entrypoint for context API */ -const context = api.context; +var context = api.context; (function (SpanStatusCode) { /** * The default status. @@ -4327,18 +4503,422 @@ const context = api.context; })(exports.SpanStatusCode || (exports.SpanStatusCode = {})); // Copyright (c) Microsoft Corporation. -function isTracingDisabled() { - var _a; - if (typeof process === "undefined") { - // not supported in browser for now without polyfills - return false; - } - const azureTracingDisabledValue = (_a = process.env.AZURE_TRACING_DISABLED) === null || _a === void 0 ? void 0 : _a.toLowerCase(); - if (azureTracingDisabledValue === "false" || azureTracingDisabledValue === "0") { - return false; +// Licensed under the MIT license. +/** + * @internal + */ +var OpenCensusTraceStateWrapper = /** @class */ (function () { + function OpenCensusTraceStateWrapper(state) { + this._state = state; } - return Boolean(azureTracingDisabledValue); + OpenCensusTraceStateWrapper.prototype.get = function (_key) { + throw new Error("Method not implemented."); + }; + OpenCensusTraceStateWrapper.prototype.set = function (_key, _value) { + throw new Error("Method not implemented."); + }; + OpenCensusTraceStateWrapper.prototype.unset = function (_key) { + throw new Error("Method not implemented"); + }; + OpenCensusTraceStateWrapper.prototype.serialize = function () { + return this._state || ""; + }; + return OpenCensusTraceStateWrapper; +}()); + +// Copyright (c) Microsoft Corporation. +/** An enumeration of canonical status codes. */ +var CanonicalCode; +(function (CanonicalCode) { + /** + * Not an error; returned on success + */ + CanonicalCode[CanonicalCode["OK"] = 0] = "OK"; + /** + * Internal errors. Means some invariants expected by underlying + * system has been broken. If you see one of these errors, + * something is very broken. 
+ */ + CanonicalCode[CanonicalCode["INTERNAL"] = 13] = "INTERNAL"; +})(CanonicalCode || (CanonicalCode = {})); +function isWrappedSpan(span) { + return !!span && span.getWrappedSpan !== undefined; +} +function isTracer(tracerOrSpan) { + return tracerOrSpan.getWrappedTracer !== undefined; } +/** + * An implementation of OpenTelemetry Span that wraps an OpenCensus Span. + */ +var OpenCensusSpanWrapper = /** @class */ (function () { + function OpenCensusSpanWrapper(tracerOrSpan, name, options, context$1) { + if (name === void 0) { name = ""; } + if (options === void 0) { options = {}; } + if (isTracer(tracerOrSpan)) { + var span = getSpan(context$1 !== null && context$1 !== void 0 ? context$1 : context.active()); + var parent = isWrappedSpan(span) ? span.getWrappedSpan() : undefined; + this._span = tracerOrSpan.getWrappedTracer().startChildSpan({ + name: name, + childOf: parent + }); + this._span.start(); + if (options.links) { + for (var _i = 0, _a = options.links; _i < _a.length; _i++) { + var link = _a[_i]; + // Since there is no way to set the link relationship, leave it as Unspecified. + this._span.addLink(link.context.traceId, link.context.spanId, 0 /* LinkType.UNSPECIFIED */, link.attributes); + } + } + } + else { + this._span = tracerOrSpan; + } + } + /** + * The underlying OpenCensus Span + */ + OpenCensusSpanWrapper.prototype.getWrappedSpan = function () { + return this._span; + }; + /** + * Marks the end of Span execution. + * @param endTime - The time to use as the Span's end time. Defaults to + * the current time. + */ + OpenCensusSpanWrapper.prototype.end = function (_endTime) { + this._span.end(); + }; + /** + * Returns the SpanContext associated with this Span. + */ + OpenCensusSpanWrapper.prototype.context = function () { + var openCensusSpanContext = this._span.spanContext; + return { + spanId: openCensusSpanContext.spanId, + traceId: openCensusSpanContext.traceId, + traceFlags: openCensusSpanContext.options, + traceState: new OpenCensusTraceStateWrapper(openCensusSpanContext.traceState) + }; + }; + /** + * Sets an attribute on the Span + * @param key - The attribute key + * @param value - The attribute value + */ + OpenCensusSpanWrapper.prototype.setAttribute = function (key, value) { + this._span.addAttribute(key, value); + return this; + }; + /** + * Sets attributes on the Span + * @param attributes - The attributes to add + */ + OpenCensusSpanWrapper.prototype.setAttributes = function (attributes) { + this._span.attributes = attributes; + return this; + }; + /** + * Adds an event to the Span + * @param name - The name of the event + * @param attributes - The associated attributes to add for this event + */ + OpenCensusSpanWrapper.prototype.addEvent = function (_name, _attributes) { + throw new Error("Method not implemented."); + }; + /** + * Sets a status on the span. Overrides the default of SpanStatusCode.OK. + * @param status - The status to set. 
+ */ + OpenCensusSpanWrapper.prototype.setStatus = function (status) { + switch (status.code) { + case exports.SpanStatusCode.ERROR: { + this._span.setStatus(CanonicalCode.INTERNAL, status.message); + break; + } + case exports.SpanStatusCode.OK: { + this._span.setStatus(CanonicalCode.OK, status.message); + break; + } + case exports.SpanStatusCode.UNSET: { + break; + } + } + return this; + }; + /** + * Updates the name of the Span + * @param name - The new Span name + */ + OpenCensusSpanWrapper.prototype.updateName = function (name) { + this._span.name = name; + return this; + }; + /** + * Returns whether this span will be recorded + */ + OpenCensusSpanWrapper.prototype.isRecording = function () { + // NoRecordSpans have an empty traceId + return !!this._span.traceId; + }; + /** + * Sets exception as a span event + * @param exception - the exception the only accepted values are string or Error + * @param time - the time to set as Span's event time. If not provided, + * use the current time. + */ + OpenCensusSpanWrapper.prototype.recordException = function (_exception, _time) { + throw new Error("Method not implemented"); + }; + return OpenCensusSpanWrapper; +}()); + +// Copyright (c) Microsoft Corporation. +/** + * An implementation of OpenTelemetry Tracer that wraps an OpenCensus Tracer. + */ +var OpenCensusTracerWrapper = /** @class */ (function () { + /** + * Create a new wrapper around a given OpenCensus Tracer. + * @param tracer - The OpenCensus Tracer to wrap. + */ + function OpenCensusTracerWrapper(tracer) { + this._tracer = tracer; + } + /** + * The wrapped OpenCensus Tracer + */ + OpenCensusTracerWrapper.prototype.getWrappedTracer = function () { + return this._tracer; + }; + /** + * Starts a new Span. + * @param name - The name of the span. + * @param options - The SpanOptions used during Span creation. + */ + OpenCensusTracerWrapper.prototype.startSpan = function (name, options) { + return new OpenCensusSpanWrapper(this, name, options); + }; + /** + * Returns the current Span from the current context, if available. + */ + OpenCensusTracerWrapper.prototype.getCurrentSpan = function () { + return undefined; + }; + /** + * Executes the given function within the context provided by a Span. + * @param _span - The span that provides the context. + * @param _fn - The function to be executed. + */ + OpenCensusTracerWrapper.prototype.withSpan = function (_span, _fn) { + throw new Error("Method not implemented."); + }; + /** + * Bind a Span as the target's scope + * @param target - An object to bind the scope. + * @param _span - A specific Span to use. Otherwise, use the current one. + */ + OpenCensusTracerWrapper.prototype.bind = function (_target, _span) { + throw new Error("Method not implemented."); + }; + return OpenCensusTracerWrapper; +}()); + +// Copyright (c) Microsoft Corporation. +/** + * A mock span useful for testing. + */ +var TestSpan = /** @class */ (function (_super) { + tslib.__extends(TestSpan, _super); + /** + * Starts a new Span. + * @param parentTracer- The tracer that created this Span + * @param name - The name of the span. 
+ * @param context - The SpanContext this span belongs to + * @param kind - The SpanKind of this Span + * @param parentSpanId - The identifier of the parent Span + * @param startTime - The startTime of the event (defaults to now) + */ + function TestSpan(parentTracer, name, context, kind, parentSpanId, startTime) { + if (startTime === void 0) { startTime = Date.now(); } + var _this = _super.call(this) || this; + _this._tracer = parentTracer; + _this.name = name; + _this.kind = kind; + _this.startTime = startTime; + _this.parentSpanId = parentSpanId; + _this.status = { + code: exports.SpanStatusCode.OK + }; + _this.endCalled = false; + _this._context = context; + _this.attributes = {}; + return _this; + } + /** + * Returns the Tracer that created this Span + */ + TestSpan.prototype.tracer = function () { + return this._tracer; + }; + /** + * Returns the SpanContext associated with this Span. + */ + TestSpan.prototype.context = function () { + return this._context; + }; + /** + * Marks the end of Span execution. + * @param _endTime - The time to use as the Span's end time. Defaults to + * the current time. + */ + TestSpan.prototype.end = function (_endTime) { + this.endCalled = true; + }; + /** + * Sets a status on the span. Overrides the default of SpanStatusCode.OK. + * @param status - The status to set. + */ + TestSpan.prototype.setStatus = function (status) { + this.status = status; + return this; + }; + /** + * Returns whether this span will be recorded + */ + TestSpan.prototype.isRecording = function () { + return true; + }; + /** + * Sets an attribute on the Span + * @param key - The attribute key + * @param value - The attribute value + */ + TestSpan.prototype.setAttribute = function (key, value) { + this.attributes[key] = value; + return this; + }; + /** + * Sets attributes on the Span + * @param attributes - The attributes to add + */ + TestSpan.prototype.setAttributes = function (attributes) { + for (var _i = 0, _a = Object.keys(attributes); _i < _a.length; _i++) { + var key = _a[_i]; + this.attributes[key] = attributes[key]; + } + return this; + }; + return TestSpan; +}(NoOpSpan)); + +// Copyright (c) Microsoft Corporation. 
+/** + * A mock tracer useful for testing + */ +var TestTracer = /** @class */ (function (_super) { + tslib.__extends(TestTracer, _super); + function TestTracer() { + var _this = _super !== null && _super.apply(this, arguments) || this; + _this.traceIdCounter = 0; + _this.spanIdCounter = 0; + _this.rootSpans = []; + _this.knownSpans = []; + return _this; + } + TestTracer.prototype.getNextTraceId = function () { + this.traceIdCounter++; + return String(this.traceIdCounter); + }; + TestTracer.prototype.getNextSpanId = function () { + this.spanIdCounter++; + return String(this.spanIdCounter); + }; + /** + * Returns all Spans that were created without a parent + */ + TestTracer.prototype.getRootSpans = function () { + return this.rootSpans; + }; + /** + * Returns all Spans this Tracer knows about + */ + TestTracer.prototype.getKnownSpans = function () { + return this.knownSpans; + }; + /** + * Returns all Spans where end() has not been called + */ + TestTracer.prototype.getActiveSpans = function () { + return this.knownSpans.filter(function (span) { + return !span.endCalled; + }); + }; + /** + * Return all Spans for a particular trace, grouped by their + * parent Span in a tree-like structure + * @param traceId - The traceId to return the graph for + */ + TestTracer.prototype.getSpanGraph = function (traceId) { + var traceSpans = this.knownSpans.filter(function (span) { + return span.context().traceId === traceId; + }); + var roots = []; + var nodeMap = new Map(); + for (var _i = 0, traceSpans_1 = traceSpans; _i < traceSpans_1.length; _i++) { + var span = traceSpans_1[_i]; + var spanId = span.context().spanId; + var node = { + name: span.name, + children: [] + }; + nodeMap.set(spanId, node); + if (span.parentSpanId) { + var parent = nodeMap.get(span.parentSpanId); + if (!parent) { + throw new Error("Span with name " + node.name + " has an unknown parentSpan with id " + span.parentSpanId); + } + parent.children.push(node); + } + else { + roots.push(node); + } + } + return { + roots: roots + }; + }; + /** + * Starts a new Span. + * @param name - The name of the span. + * @param options - The SpanOptions used during Span creation. + */ + TestTracer.prototype.startSpan = function (name, options, context$1) { + var parentContext = getSpanContext(context$1 || context.active()); + var traceId; + var isRootSpan = false; + if (parentContext && parentContext.traceId) { + traceId = parentContext.traceId; + } + else { + traceId = this.getNextTraceId(); + isRootSpan = true; + } + var spanContext = { + traceId: traceId, + spanId: this.getNextSpanId(), + traceFlags: 0 /* NONE */ + }; + var span = new TestSpan(this, name, spanContext, (options === null || options === void 0 ? void 0 : options.kind) || exports.SpanKind.INTERNAL, parentContext ? parentContext.spanId : undefined, options === null || options === void 0 ? void 0 : options.startTime); + this.knownSpans.push(span); + if (isRootSpan) { + this.rootSpans.push(span); + } + return span; + }; + return TestTracer; +}(NoOpTracer)); + +// Copyright (c) Microsoft Corporation. /** * Creates a function that can be used to create spans using the global tracer. * @@ -4359,28 +4939,22 @@ function isTracingDisabled() { */ function createSpanFunction(args) { return function (operationName, operationOptions) { - const tracer = getTracer(); - const tracingOptions = (operationOptions === null || operationOptions === void 0 ? 
void 0 : operationOptions.tracingOptions) || {}; - const spanOptions = Object.assign({ kind: exports.SpanKind.INTERNAL }, tracingOptions.spanOptions); - const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName; - let span; - if (isTracingDisabled()) { - span = api.trace.wrapSpanContext(api.INVALID_SPAN_CONTEXT); - } - else { - span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); - } + var tracer = getTracer(); + var tracingOptions = (operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) || {}; + var spanOptions = tslib.__assign({ kind: exports.SpanKind.INTERNAL }, tracingOptions.spanOptions); + var spanName = args.packagePrefix ? args.packagePrefix + "." + operationName : operationName; + var span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); if (args.namespace) { span.setAttribute("az.namespace", args.namespace); } - let newSpanOptions = tracingOptions.spanOptions || {}; + var newSpanOptions = tracingOptions.spanOptions || {}; if (span.isRecording() && args.namespace) { - newSpanOptions = Object.assign(Object.assign({}, tracingOptions.spanOptions), { attributes: Object.assign(Object.assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); + newSpanOptions = tslib.__assign(tslib.__assign({}, tracingOptions.spanOptions), { attributes: tslib.__assign(tslib.__assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); } - const newTracingOptions = Object.assign(Object.assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || context.active(), span) }); - const newOperationOptions = Object.assign(Object.assign({}, operationOptions), { tracingOptions: newTracingOptions }); + var newTracingOptions = tslib.__assign(tslib.__assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || context.active(), span) }); + var newOperationOptions = tslib.__assign(tslib.__assign({}, operationOptions), { tracingOptions: newTracingOptions }); return { - span, + span: span, updatedOptions: newOperationOptions }; }; @@ -4388,26 +4962,26 @@ function createSpanFunction(args) { // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -const VERSION = "00"; +var VERSION = "00"; /** * Generates a `SpanContext` given a `traceparent` header value. * @param traceParent - Serialized span context data as a `traceparent` header value. * @returns The `SpanContext` generated from the `traceparent` value. */ function extractSpanContextFromTraceParentHeader(traceParentHeader) { - const parts = traceParentHeader.split("-"); + var parts = traceParentHeader.split("-"); if (parts.length !== 4) { return; } - const [version, traceId, spanId, traceOptions] = parts; + var version = parts[0], traceId = parts[1], spanId = parts[2], traceOptions = parts[3]; if (version !== VERSION) { return; } - const traceFlags = parseInt(traceOptions, 16); - const spanContext = { - spanId, - traceId, - traceFlags + var traceFlags = parseInt(traceOptions, 16); + var spanContext = { + spanId: spanId, + traceId: traceId, + traceFlags: traceFlags }; return spanContext; } @@ -4417,7 +4991,7 @@ function extractSpanContextFromTraceParentHeader(traceParentHeader) { * @returns The `spanContext` represented as a `traceparent` value. 
*/ function getTraceParentHeader(spanContext) { - const missingFields = []; + var missingFields = []; if (!spanContext.traceId) { missingFields.push("traceId"); } @@ -4427,13 +5001,19 @@ function getTraceParentHeader(spanContext) { if (missingFields.length) { return; } - const flags = spanContext.traceFlags || 0 /* NONE */; - const hexFlags = flags.toString(16); - const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags; + var flags = spanContext.traceFlags || 0 /* NONE */; + var hexFlags = flags.toString(16); + var traceFlags = hexFlags.length === 1 ? "0" + hexFlags : hexFlags; // https://www.w3.org/TR/trace-context/#traceparent-header-field-values - return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`; + return VERSION + "-" + spanContext.traceId + "-" + spanContext.spanId + "-" + traceFlags; } +exports.NoOpSpan = NoOpSpan; +exports.NoOpTracer = NoOpTracer; +exports.OpenCensusSpanWrapper = OpenCensusSpanWrapper; +exports.OpenCensusTracerWrapper = OpenCensusTracerWrapper; +exports.TestSpan = TestSpan; +exports.TestTracer = TestTracer; exports.context = context; exports.createSpanFunction = createSpanFunction; exports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader; @@ -4441,9 +5021,9 @@ exports.getSpan = getSpan; exports.getSpanContext = getSpanContext; exports.getTraceParentHeader = getTraceParentHeader; exports.getTracer = getTracer; -exports.isSpanContextValid = isSpanContextValid; exports.setSpan = setSpan; exports.setSpanContext = setSpanContext; +exports.setTracer = setTracer; //# sourceMappingURL=index.js.map @@ -4531,16 +5111,17 @@ module.exports = new Type('tag:yaml.org,2002:pairs', { * See the License for the specific language governing permissions and * limitations under the License. */ -var __spreadArray = (this && this.__spreadArray) || function (to, from) { - for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) - to[j] = from[i]; - return to; +var __spreadArrays = (this && this.__spreadArrays) || function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.ContextAPI = void 0; var NoopContextManager_1 = __webpack_require__(754); var global_utils_1 = __webpack_require__(525); -var diag_1 = __webpack_require__(118); var API_NAME = 'context'; var NOOP_CONTEXT_MANAGER = new NoopContextManager_1.NoopContextManager(); /** @@ -4558,12 +5139,11 @@ var ContextAPI = /** @class */ (function () { return this._instance; }; /** - * Set the current context manager. - * - * @returns true if the context manager was successfully registered, else false + * Set the current context manager. 
Returns the initialized context manager */ ContextAPI.prototype.setGlobalContextManager = function (contextManager) { - return global_utils_1.registerGlobal(API_NAME, contextManager, diag_1.DiagAPI.instance()); + global_utils_1.registerGlobal(API_NAME, contextManager); + return contextManager; }; /** * Get the currently active context @@ -4585,16 +5165,17 @@ var ContextAPI = /** @class */ (function () { for (var _i = 3; _i < arguments.length; _i++) { args[_i - 3] = arguments[_i]; } - return (_a = this._getContextManager()).with.apply(_a, __spreadArray([context, fn, thisArg], args)); + return (_a = this._getContextManager()).with.apply(_a, __spreadArrays([context, fn, thisArg], args)); }; /** * Bind a context to a target function or event emitter * - * @param context context to bind to the event emitter or function. Defaults to the currently active context * @param target function or event emitter to bind + * @param context context to bind to the event emitter or function. Defaults to the currently active context */ - ContextAPI.prototype.bind = function (context, target) { - return this._getContextManager().bind(context, target); + ContextAPI.prototype.bind = function (target, context) { + if (context === void 0) { context = this.active(); } + return this._getContextManager().bind(target, context); }; ContextAPI.prototype._getContextManager = function () { return global_utils_1.getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER; @@ -4602,7 +5183,7 @@ var ContextAPI = /** @class */ (function () { /** Disable and remove the global context manager */ ContextAPI.prototype.disable = function () { this._getContextManager().disable(); - global_utils_1.unregisterGlobal(API_NAME, diag_1.DiagAPI.instance()); + global_utils_1.unregisterGlobal(API_NAME); }; return ContextAPI; }()); @@ -8599,36 +9180,48 @@ exports.eventTarget_removeAllEventListeners = eventTarget_removeAllEventListener /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; +/*! + * Copyright (c) Microsoft and contributors. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for + * license information. + * + * Azure Core LRO SDK for JavaScript - 1.0.5 + */ Object.defineProperty(exports, '__esModule', { value: true }); -var logger$1 = __webpack_require__(492); +var tslib = __webpack_require__(725); // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** * When a poller is manually stopped through the `stopPolling` method, * the poller will be rejected with an instance of the PollerStoppedError. */ -class PollerStoppedError extends Error { - constructor(message) { - super(message); - this.name = "PollerStoppedError"; - Object.setPrototypeOf(this, PollerStoppedError.prototype); +var PollerStoppedError = /** @class */ (function (_super) { + tslib.__extends(PollerStoppedError, _super); + function PollerStoppedError(message) { + var _this = _super.call(this, message) || this; + _this.name = "PollerStoppedError"; + Object.setPrototypeOf(_this, PollerStoppedError.prototype); + return _this; } -} + return PollerStoppedError; +}(Error)); /** * When a poller is cancelled through the `cancelOperation` method, * the poller will be rejected with an instance of the PollerCancelledError. 
*/ -class PollerCancelledError extends Error { - constructor(message) { - super(message); - this.name = "PollerCancelledError"; - Object.setPrototypeOf(this, PollerCancelledError.prototype); +var PollerCancelledError = /** @class */ (function (_super) { + tslib.__extends(PollerCancelledError, _super); + function PollerCancelledError(message) { + var _this = _super.call(this, message) || this; + _this.name = "PollerCancelledError"; + Object.setPrototypeOf(_this, PollerCancelledError.prototype); + return _this; } -} + return PollerCancelledError; +}(Error)); /** * A class that represents the definition of a program that polls through consecutive requests * until it reaches a state of completion. @@ -8691,7 +9284,7 @@ class PollerCancelledError extends Error { * */ // eslint-disable-next-line no-use-before-define -class Poller { +var Poller = /** @class */ (function () { /** * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. * @@ -8757,18 +9350,19 @@ class Poller { * * @param operation - Must contain the basic properties of `PollOperation`. */ - constructor(operation) { + function Poller(operation) { + var _this = this; this.stopped = true; this.pollProgressCallbacks = []; this.operation = operation; - this.promise = new Promise((resolve, reject) => { - this.resolve = resolve; - this.reject = reject; + this.promise = new Promise(function (resolve, reject) { + _this.resolve = resolve; + _this.reject = reject; }); // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown. // The above warning would get thrown if `poller.poll` is called, it returns an error, // and pullUntilDone did not have a .catch or await try/catch on it's return value. - this.promise.catch(() => { + this.promise.catch(function () { /* intentionally blank */ }); } @@ -8777,15 +9371,29 @@ class Poller { * Starts a loop that will break only if the poller is done * or if the poller is stopped. */ - async startPolling() { - if (this.stopped) { - this.stopped = false; - } - while (!this.isStopped() && !this.isDone()) { - await this.poll(); - await this.delay(); - } - } + Poller.prototype.startPolling = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (this.stopped) { + this.stopped = false; + } + _a.label = 1; + case 1: + if (!(!this.isStopped() && !this.isDone())) return [3 /*break*/, 4]; + return [4 /*yield*/, this.poll()]; + case 2: + _a.sent(); + return [4 /*yield*/, this.delay()]; + case 3: + _a.sent(); + return [3 /*break*/, 1]; + case 4: return [2 /*return*/]; + } + }); + }); + }; /** * @internal * pollOnce does one polling, by calling to the update method of the underlying @@ -8795,31 +9403,44 @@ class Poller { * * @param options - Optional properties passed to the operation's update method. */ - async pollOnce(options = {}) { - try { - if (!this.isDone()) { - this.operation = await this.operation.update({ - abortSignal: options.abortSignal, - fireProgress: this.fireProgress.bind(this) - }); - if (this.isDone() && this.resolve) { - // If the poller has finished polling, this means we now have a result. - // However, it can be the case that TResult is instantiated to void, so - // we are not expecting a result anyway. To assert that we might not - // have a result eventually after finishing polling, we cast the result - // to TResult. 
- this.resolve(this.operation.state.result); + Poller.prototype.pollOnce = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, e_1; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _b.trys.push([0, 3, , 4]); + if (!!this.isDone()) return [3 /*break*/, 2]; + _a = this; + return [4 /*yield*/, this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this) + })]; + case 1: + _a.operation = _b.sent(); + if (this.isDone() && this.resolve) { + // If the poller has finished polling, this means we now have a result. + // However, it can be the case that TResult is instantiated to void, so + // we are not expecting a result anyway. To assert that we might not + // have a result eventually after finishing polling, we cast the result + // to TResult. + this.resolve(this.operation.state.result); + } + _b.label = 2; + case 2: return [3 /*break*/, 4]; + case 3: + e_1 = _b.sent(); + this.operation.state.error = e_1; + if (this.reject) { + this.reject(e_1); + } + throw e_1; + case 4: return [2 /*return*/]; } - } - } - catch (e) { - this.operation.state.error = e; - if (this.reject) { - this.reject(e); - } - throw e; - } - } + }); + }); + }; /** * @internal * fireProgress calls the functions passed in via onProgress the method of the poller. @@ -8829,22 +9450,36 @@ class Poller { * * @param state - The current operation state. */ - fireProgress(state) { - for (const callback of this.pollProgressCallbacks) { + Poller.prototype.fireProgress = function (state) { + for (var _i = 0, _a = this.pollProgressCallbacks; _i < _a.length; _i++) { + var callback = _a[_i]; callback(state); } - } + }; /** * @internal * Invokes the underlying operation's cancel method, and rejects the * pollUntilDone promise. */ - async cancelOnce(options = {}) { - this.operation = await this.operation.cancel(options); - if (this.reject) { - this.reject(new PollerCancelledError("Poller cancelled")); - } - } + Poller.prototype.cancelOnce = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = this; + return [4 /*yield*/, this.operation.cancel(options)]; + case 1: + _a.operation = _b.sent(); + if (this.reject) { + this.reject(new PollerCancelledError("Poller cancelled")); + } + return [2 /*return*/]; + } + }); + }); + }; /** * Returns a promise that will resolve once a single polling request finishes. * It does this by calling the update method of the Poller's operation. @@ -8853,61 +9488,68 @@ class Poller { * * @param options - Optional properties passed to the operation's update method. */ - poll(options = {}) { + Poller.prototype.poll = function (options) { + var _this = this; + if (options === void 0) { options = {}; } if (!this.pollOncePromise) { this.pollOncePromise = this.pollOnce(options); - const clearPollOncePromise = () => { - this.pollOncePromise = undefined; + var clearPollOncePromise = function () { + _this.pollOncePromise = undefined; }; this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); } return this.pollOncePromise; - } + }; /** * Returns a promise that will resolve once the underlying operation is completed. 
*/ - async pollUntilDone() { - if (this.stopped) { - this.startPolling().catch(this.reject); - } - return this.promise; - } + Poller.prototype.pollUntilDone = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + if (this.stopped) { + this.startPolling().catch(this.reject); + } + return [2 /*return*/, this.promise]; + }); + }); + }; /** * Invokes the provided callback after each polling is completed, * sending the current state of the poller's operation. * * It returns a method that can be used to stop receiving updates on the given callback function. */ - onProgress(callback) { + Poller.prototype.onProgress = function (callback) { + var _this = this; this.pollProgressCallbacks.push(callback); - return () => { - this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + return function () { + _this.pollProgressCallbacks = _this.pollProgressCallbacks.filter(function (c) { return c !== callback; }); }; - } + }; /** * Returns true if the poller has finished polling. */ - isDone() { - const state = this.operation.state; + Poller.prototype.isDone = function () { + var state = this.operation.state; return Boolean(state.isCompleted || state.isCancelled || state.error); - } + }; /** * Stops the poller from continuing to poll. */ - stopPolling() { + Poller.prototype.stopPolling = function () { if (!this.stopped) { this.stopped = true; if (this.reject) { this.reject(new PollerStoppedError("This poller is already stopped")); } } - } + }; /** * Returns true if the poller is stopped. */ - isStopped() { + Poller.prototype.isStopped = function () { return this.stopped; - } + }; /** * Attempts to cancel the underlying operation. * @@ -8917,7 +9559,8 @@ class Poller { * * @param options - Optional properties passed to the operation's update method. */ - cancelOperation(options = {}) { + Poller.prototype.cancelOperation = function (options) { + if (options === void 0) { options = {}; } if (!this.stopped) { this.stopped = true; } @@ -8928,7 +9571,7 @@ class Poller { throw new Error("A cancel request is currently pending"); } return this.cancelPromise; - } + }; /** * Returns the state of the operation. * @@ -8977,379 +9620,29 @@ class Poller { * `../test/utils/testPoller.ts` * and look for the getOperationState implementation. */ - getOperationState() { + Poller.prototype.getOperationState = function () { return this.operation.state; - } + }; /** * Returns the result value of the operation, * regardless of the state of the poller. * It can return undefined or an incomplete form of the final TResult value * depending on the implementation. */ - getResult() { - const state = this.operation.state; + Poller.prototype.getResult = function () { + var state = this.operation.state; return state.result; - } + }; /** * Returns a serialized version of the poller's operation * by invoking the operation's toString method. */ - toString() { + Poller.prototype.toString = function () { return this.operation.toString(); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * The `@azure/logger` configuration for this package. - * @internal - */ -const logger = logger$1.createClientLogger("core-lro"); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Detects where the continuation token is and returns it. 
Notice that azure-asyncoperation - * must be checked first before the other location headers because there are scenarios - * where both azure-asyncoperation and location could be present in the same response but - * azure-asyncoperation should be the one to use for polling. - */ -function getPollingUrl(rawResponse, defaultPath) { - var _a, _b, _c; - return ((_c = (_b = (_a = getAzureAsyncOperation(rawResponse)) !== null && _a !== void 0 ? _a : getLocation(rawResponse)) !== null && _b !== void 0 ? _b : getOperationLocation(rawResponse)) !== null && _c !== void 0 ? _c : defaultPath); -} -function getLocation(rawResponse) { - return rawResponse.headers["location"]; -} -function getOperationLocation(rawResponse) { - return rawResponse.headers["operation-location"]; -} -function getAzureAsyncOperation(rawResponse) { - return rawResponse.headers["azure-asyncoperation"]; -} -function inferLroMode(requestPath, requestMethod, rawResponse) { - if (getAzureAsyncOperation(rawResponse) !== undefined) { - return { - mode: "AzureAsync", - resourceLocation: requestMethod === "PUT" - ? requestPath - : requestMethod === "POST" - ? getLocation(rawResponse) - : undefined - }; - } - else if (getLocation(rawResponse) !== undefined || - getOperationLocation(rawResponse) !== undefined) { - return { - mode: "Location" - }; - } - else if (["PUT", "PATCH"].includes(requestMethod)) { - return { - mode: "Body" - }; - } - return {}; -} -class SimpleRestError extends Error { - constructor(message, statusCode) { - super(message); - this.name = "RestError"; - this.statusCode = statusCode; - Object.setPrototypeOf(this, SimpleRestError.prototype); - } -} -function isUnexpectedInitialResponse(rawResponse) { - const code = rawResponse.statusCode; - if (![203, 204, 202, 201, 200, 500].includes(code)) { - throw new SimpleRestError(`Received unexpected HTTP status code ${code} in the initial response. This may indicate a server issue.`, code); - } - return false; -} -function isUnexpectedPollingResponse(rawResponse) { - const code = rawResponse.statusCode; - if (![202, 201, 200, 500].includes(code)) { - throw new SimpleRestError(`Received unexpected HTTP status code ${code} while polling. This may indicate a server issue.`, code); - } - return false; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const successStates = ["succeeded"]; -const failureStates = ["failed", "canceled", "cancelled"]; - -// Copyright (c) Microsoft Corporation. -function getResponseStatus(rawResponse) { - var _a, _b; - const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - return (_b = status === null || status === void 0 ? void 0 : status.toLowerCase()) !== null && _b !== void 0 ? _b : "succeeded"; -} -function isAzureAsyncPollingDone(rawResponse) { - const state = getResponseStatus(rawResponse); - if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { - throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); - } - return successStates.includes(state); -} -/** - * Sends a request to the URI of the provisioned resource if needed. - */ -async function sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig) { - switch (lroResourceLocationConfig) { - case "original-uri": - return lro.sendPollRequest(lro.requestPath); - case "azure-async-operation": - return undefined; - case "location": - default: - return lro.sendPollRequest(resourceLocation !== null && resourceLocation !== void 0 ? 
resourceLocation : lro.requestPath); - } -} -function processAzureAsyncOperationResult(lro, resourceLocation, lroResourceLocationConfig) { - return (response) => { - if (isAzureAsyncPollingDone(response.rawResponse)) { - if (resourceLocation === undefined) { - return Object.assign(Object.assign({}, response), { done: true }); - } - else { - return Object.assign(Object.assign({}, response), { done: false, next: async () => { - const finalResponse = await sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig); - return Object.assign(Object.assign({}, (finalResponse !== null && finalResponse !== void 0 ? finalResponse : response)), { done: true }); - } }); - } - } - return Object.assign(Object.assign({}, response), { done: false }); }; -} - -// Copyright (c) Microsoft Corporation. -function getProvisioningState(rawResponse) { - var _a, _b, _c; - const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - const state = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; - return (_c = state === null || state === void 0 ? void 0 : state.toLowerCase()) !== null && _c !== void 0 ? _c : "succeeded"; -} -function isBodyPollingDone(rawResponse) { - const state = getProvisioningState(rawResponse); - if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { - throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); - } - return successStates.includes(state); -} -/** - * Creates a polling strategy based on BodyPolling which uses the provisioning state - * from the result to determine the current operation state - */ -function processBodyPollingOperationResult(response) { - return Object.assign(Object.assign({}, response), { done: isBodyPollingDone(response.rawResponse) }); -} - -// Copyright (c) Microsoft Corporation. -function isLocationPollingDone(rawResponse) { - return !isUnexpectedPollingResponse(rawResponse) && rawResponse.statusCode !== 202; -} -function processLocationPollingOperationResult(response) { - return Object.assign(Object.assign({}, response), { done: isLocationPollingDone(response.rawResponse) }); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -function processPassthroughOperationResult(response) { - return Object.assign(Object.assign({}, response), { done: true }); -} - -// Copyright (c) Microsoft Corporation. -/** - * creates a stepping function that maps an LRO state to another. - */ -function createGetLroStatusFromResponse(lroPrimitives, config, lroResourceLocationConfig) { - switch (config.mode) { - case "AzureAsync": { - return processAzureAsyncOperationResult(lroPrimitives, config.resourceLocation, lroResourceLocationConfig); - } - case "Location": { - return processLocationPollingOperationResult; - } - case "Body": { - return processBodyPollingOperationResult; - } - default: { - return processPassthroughOperationResult; - } - } -} -/** - * Creates a polling operation. - */ -function createPoll(lroPrimitives) { - return async (path, pollerConfig, getLroStatusFromResponse) => { - const response = await lroPrimitives.sendPollRequest(path); - const retryAfter = response.rawResponse.headers["retry-after"]; - if (retryAfter !== undefined) { - const retryAfterInMs = parseInt(retryAfter); - pollerConfig.intervalInMs = isNaN(retryAfterInMs) - ? 
calculatePollingIntervalFromDate(new Date(retryAfter), pollerConfig.intervalInMs) - : retryAfterInMs; - } - return getLroStatusFromResponse(response); - }; -} -function calculatePollingIntervalFromDate(retryAfterDate, defaultIntervalInMs) { - const timeNow = Math.floor(new Date().getTime()); - const retryAfterTime = retryAfterDate.getTime(); - if (timeNow < retryAfterTime) { - return retryAfterTime - timeNow; - } - return defaultIntervalInMs; -} -/** - * Creates a callback to be used to initialize the polling operation state. - * @param state - of the polling operation - * @param operationSpec - of the LRO - * @param callback - callback to be called when the operation is done - * @returns callback that initializes the state of the polling operation - */ -function createInitializeState(state, requestPath, requestMethod) { - return (response) => { - if (isUnexpectedInitialResponse(response.rawResponse)) - ; - state.initialRawResponse = response.rawResponse; - state.isStarted = true; - state.pollingURL = getPollingUrl(state.initialRawResponse, requestPath); - state.config = inferLroMode(requestPath, requestMethod, state.initialRawResponse); - /** short circuit polling if body polling is done in the initial request */ - if (state.config.mode === undefined || - (state.config.mode === "Body" && isBodyPollingDone(state.initialRawResponse))) { - state.result = response.flatResponse; - state.isCompleted = true; - } - logger.verbose(`LRO: initial state: ${JSON.stringify(state)}`); - return Boolean(state.isCompleted); - }; -} - -// Copyright (c) Microsoft Corporation. -class GenericPollOperation { - constructor(state, lro, lroResourceLocationConfig, processResult, updateState, isDone) { - this.state = state; - this.lro = lro; - this.lroResourceLocationConfig = lroResourceLocationConfig; - this.processResult = processResult; - this.updateState = updateState; - this.isDone = isDone; - } - setPollerConfig(pollerConfig) { - this.pollerConfig = pollerConfig; - } - /** - * General update function for LROPoller, the general process is as follows - * 1. Check initial operation result to determine the strategy to use - * - Strategies: Location, Azure-AsyncOperation, Original Uri - * 2. Check if the operation result has a terminal state - * - Terminal state will be determined by each strategy - * 2.1 If it is terminal state Check if a final GET request is required, if so - * send final GET request and return result from operation. If no final GET - * is required, just return the result from operation. - * - Determining what to call for final request is responsibility of each strategy - * 2.2 If it is not terminal state, call the polling operation and go to step 1 - * - Determining what to call for polling is responsibility of each strategy - * - Strategies will always use the latest URI for polling if provided otherwise - * the last known one - */ - async update(options) { - var _a, _b, _c; - const state = this.state; - let lastResponse = undefined; - if (!state.isStarted) { - const initializeState = createInitializeState(state, this.lro.requestPath, this.lro.requestMethod); - lastResponse = await this.lro.sendInitialRequest(); - initializeState(lastResponse); - } - if (!state.isCompleted) { - if (!this.poll || !this.getLroStatusFromResponse) { - if (!state.config) { - throw new Error("Bad state: LRO mode is undefined. Please check if the serialized state is well-formed."); - } - const isDone = this.isDone; - this.getLroStatusFromResponse = isDone - ? 
(response) => (Object.assign(Object.assign({}, response), { done: isDone(response.flatResponse, this.state) })) - : createGetLroStatusFromResponse(this.lro, state.config, this.lroResourceLocationConfig); - this.poll = createPoll(this.lro); - } - if (!state.pollingURL) { - throw new Error("Bad state: polling URL is undefined. Please check if the serialized state is well-formed."); - } - const currentState = await this.poll(state.pollingURL, this.pollerConfig, this.getLroStatusFromResponse); - logger.verbose(`LRO: polling response: ${JSON.stringify(currentState.rawResponse)}`); - if (currentState.done) { - state.result = this.processResult - ? this.processResult(currentState.flatResponse, state) - : currentState.flatResponse; - state.isCompleted = true; - } - else { - this.poll = (_a = currentState.next) !== null && _a !== void 0 ? _a : this.poll; - state.pollingURL = getPollingUrl(currentState.rawResponse, state.pollingURL); - } - lastResponse = currentState; - } - logger.verbose(`LRO: current state: ${JSON.stringify(state)}`); - if (lastResponse) { - (_b = this.updateState) === null || _b === void 0 ? void 0 : _b.call(this, state, lastResponse === null || lastResponse === void 0 ? void 0 : lastResponse.rawResponse); - } - else { - logger.error(`LRO: no response was received`); - } - (_c = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _c === void 0 ? void 0 : _c.call(options, state); - return this; - } - async cancel() { - this.state.isCancelled = true; - return this; - } - /** - * Serializes the Poller operation. - */ - toString() { - return JSON.stringify({ - state: this.state - }); - } -} - -// Copyright (c) Microsoft Corporation. -function deserializeState(serializedState) { - try { - return JSON.parse(serializedState).state; - } - catch (e) { - throw new Error(`LroEngine: Unable to deserialize state: ${serializedState}`); - } -} -/** - * The LRO Engine, a class that performs polling. - */ -class LroEngine extends Poller { - constructor(lro, options) { - const { intervalInMs = 2000, resumeFrom } = options || {}; - const state = resumeFrom - ? deserializeState(resumeFrom) - : {}; - const operation = new GenericPollOperation(state, lro, options === null || options === void 0 ? void 0 : options.lroResourceLocationConfig, options === null || options === void 0 ? void 0 : options.processResult, options === null || options === void 0 ? void 0 : options.updateState, options === null || options === void 0 ? void 0 : options.isDone); - super(operation); - this.config = { intervalInMs: intervalInMs }; - operation.setPollerConfig(this.config); - } - /** - * The method used by the poller to wait before attempting to update its operation. 
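The numbered steps in the GenericPollOperation.update docstring above are easier to follow outside the transpiled code. Below is a simplified, illustrative restatement that reuses the helpers from the core-lro code on the removed side of this hunk (inferLroMode, getPollingUrl, createGetLroStatusFromResponse); it omits retry-after handling, the Body-mode short circuit and error handling, so treat it as a sketch rather than the actual implementation.

// Sketch only: happy-path version of the documented polling loop.
async function pollToCompletion(lro, intervalInMs = 2000) {
  const initial = await lro.sendInitialRequest();
  // Step 1: pick the strategy (AzureAsync / Location / Body) from the initial response.
  const config = inferLroMode(lro.requestPath, lro.requestMethod, initial.rawResponse);
  const getLroStatus = createGetLroStatusFromResponse(lro, config, undefined);
  let pollingURL = getPollingUrl(initial.rawResponse, lro.requestPath);
  for (;;) {
    let current = getLroStatus(await lro.sendPollRequest(pollingURL));
    // Step 2.1: on a terminal state some strategies hand back a `next` step that performs the final GET.
    if (current.next) current = await current.next();
    if (current.done) return current.flatResponse;
    // Step 2.2: not terminal yet; keep polling the latest known URL, then wait before retrying.
    pollingURL = getPollingUrl(current.rawResponse, pollingURL);
    await new Promise((resolve) => setTimeout(resolve, intervalInMs));
  }
}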
- */ - delay() { - return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); - } -} + return Poller; +}()); -exports.LroEngine = LroEngine; exports.Poller = Poller; exports.PollerCancelledError = PollerCancelledError; exports.PollerStoppedError = PollerStoppedError; @@ -9772,7 +10065,6 @@ exports.saveCache = saveCache; */ Object.defineProperty(exports, "__esModule", { value: true }); exports.DiagAPI = void 0; -var ComponentLogger_1 = __webpack_require__(362); var logLevelLogger_1 = __webpack_require__(673); var types_1 = __webpack_require__(360); var global_utils_1 = __webpack_require__(525); @@ -9803,7 +10095,7 @@ var DiagAPI = /** @class */ (function () { var self = this; // DiagAPI specific functions self.setLogger = function (logger, logLevel) { - var _a, _b; + var _a; if (logLevel === void 0) { logLevel = types_1.DiagLogLevel.INFO; } if (logger === self) { // There isn't much we can do here. @@ -9811,23 +10103,12 @@ var DiagAPI = /** @class */ (function () { // Try to log to self. If a logger was previously registered it will receive the log. var err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); - return false; - } - var oldLogger = global_utils_1.getGlobal('diag'); - var newLogger = logLevelLogger_1.createLogLevelDiagLogger(logLevel, logger); - // There already is an logger registered. We'll let it know before overwriting it. - if (oldLogger) { - var stack = (_b = new Error().stack) !== null && _b !== void 0 ? _b : ''; - oldLogger.warn("Current logger will be overwritten from " + stack); - newLogger.warn("Current logger will overwrite one already registered from " + stack); + return; } - return global_utils_1.registerGlobal('diag', newLogger, self, true); + global_utils_1.registerGlobal('diag', logLevelLogger_1.createLogLevelDiagLogger(logLevel, logger), true); }; self.disable = function () { - global_utils_1.unregisterGlobal(API_NAME, self); - }; - self.createComponentLogger = function (options) { - return new ComponentLogger_1.DiagComponentLogger(options); + global_utils_1.unregisterGlobal(API_NAME); }; self.verbose = _logProxy('verbose'); self.debug = _logProxy('debug'); @@ -10424,7 +10705,7 @@ module.exports = require("child_process"); /* 130 */, /* 131 */, /* 132 */ -/***/ (function(__unusedmodule, exports) { +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; @@ -10444,15 +10725,109 @@ module.exports = require("child_process"); * limitations under the License. */ Object.defineProperty(exports, "__esModule", { value: true }); -exports.ROOT_CONTEXT = exports.createContextKey = void 0; +exports.ROOT_CONTEXT = exports.createContextKey = exports.setBaggage = exports.getBaggage = exports.isInstrumentationSuppressed = exports.unsuppressInstrumentation = exports.suppressInstrumentation = exports.getSpanContext = exports.setSpanContext = exports.setSpan = exports.getSpan = void 0; +var NoopSpan_1 = __webpack_require__(767); +/** + * span key + */ +var SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN'); +/** + * Shared key for indicating if instrumentation should be suppressed beyond + * this current scope. 
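The DiagAPI hunk above now registers the log-level logger directly in the global registry and drops the component-logger helper. A minimal usage sketch, assuming the package's diag singleton together with its DiagConsoleLogger and DiagLogLevel exports (none of which appear in this hunk):

// Sketch: wire a console-backed logger into the global 'diag' slot, then tear it down again.
const { diag, DiagConsoleLogger, DiagLogLevel } = require("@opentelemetry/api");

diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG); // level defaults to INFO when omitted
diag.debug("restoring dependency cache");                    // forwarded through the registered logger
diag.disable();                                              // unregisterGlobal('diag'), as shown above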
+ */ +var SUPPRESS_INSTRUMENTATION_KEY = createContextKey('OpenTelemetry Context Key SUPPRESS_INSTRUMENTATION'); +/** + * Baggage key + */ +var BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key'); +/** + * Return the span if one exists + * + * @param context context to get span from + */ +function getSpan(context) { + return context.getValue(SPAN_KEY) || undefined; +} +exports.getSpan = getSpan; +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +function setSpan(context, span) { + return context.setValue(SPAN_KEY, span); +} +exports.setSpan = setSpan; +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +function setSpanContext(context, spanContext) { + return setSpan(context, new NoopSpan_1.NoopSpan(spanContext)); +} +exports.setSpanContext = setSpanContext; +/** + * Get the span context of the span if it exists. + * + * @param context context to get values from + */ +function getSpanContext(context) { + var _a; + return (_a = getSpan(context)) === null || _a === void 0 ? void 0 : _a.context(); +} +exports.getSpanContext = getSpanContext; +/** + * Sets value on context to indicate that instrumentation should + * be suppressed beyond this current scope. + * + * @param context context to set the suppress instrumentation value on. + */ +function suppressInstrumentation(context) { + return context.setValue(SUPPRESS_INSTRUMENTATION_KEY, true); +} +exports.suppressInstrumentation = suppressInstrumentation; +/** + * Sets value on context to indicate that instrumentation should + * no-longer be suppressed beyond this current scope. + * + * @param context context to set the suppress instrumentation value on. + */ +function unsuppressInstrumentation(context) { + return context.setValue(SUPPRESS_INSTRUMENTATION_KEY, false); +} +exports.unsuppressInstrumentation = unsuppressInstrumentation; +/** + * Return current suppress instrumentation value for the given context, + * if it exists. + * + * @param context context check for the suppress instrumentation value. + */ +function isInstrumentationSuppressed(context) { + return Boolean(context.getValue(SUPPRESS_INSTRUMENTATION_KEY)); +} +exports.isInstrumentationSuppressed = isInstrumentationSuppressed; +/** + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +function getBaggage(context) { + return context.getValue(BAGGAGE_KEY) || undefined; +} +exports.getBaggage = getBaggage; +/** + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +function setBaggage(context, baggage) { + return context.setValue(BAGGAGE_KEY, baggage); +} +exports.setBaggage = setBaggage; /** Get a key to uniquely identify a context value */ function createContextKey(description) { - // The specification states that for the same input, multiple calls should - // return different keys. Due to the nature of the JS dependency management - // system, this creates problems where multiple versions of some package - // could hold different keys for the same property. - // - // Therefore, we use Symbol.for which returns the same key for the same input. 
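// For illustration, the guarantee this relies on (example key reused from SPAN_KEY above):
//   Symbol.for('OpenTelemetry Context Key SPAN') === Symbol.for('OpenTelemetry Context Key SPAN')   // true
//   Symbol('OpenTelemetry Context Key SPAN')     === Symbol('OpenTelemetry Context Key SPAN')       // false
// so two copies of this package loaded side by side still resolve SPAN_KEY, BAGGAGE_KEY and
// SUPPRESS_INSTRUMENTATION_KEY to the same context slots.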
return Symbol.for(description); } exports.createContextKey = createContextKey; @@ -10508,7 +10883,7 @@ exports.ROOT_CONTEXT = new BaseContext(); Object.defineProperty(exports, "__esModule", { value: true }); exports.VERSION = void 0; // this is autogenerated file, see scripts/version-update.js -exports.VERSION = '1.0.2'; +exports.VERSION = '1.0.0-rc.0'; //# sourceMappingURL=version.js.map /***/ }), @@ -11447,104 +11822,314 @@ module.exports = parseOptions /***/ }), /* 144 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; +/***/ (function(module) { -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.LocalDistribution = void 0; -const tc = __importStar(__webpack_require__(139)); -const core = __importStar(__webpack_require__(470)); -const fs_1 = __importDefault(__webpack_require__(747)); -const path_1 = __importDefault(__webpack_require__(622)); -const base_installer_1 = __webpack_require__(83); -const util_1 = __webpack_require__(322); -const constants_1 = __webpack_require__(211); -class LocalDistribution extends base_installer_1.JavaBase { - constructor(installerOptions, jdkFile) { - super('jdkfile', installerOptions); - this.jdkFile = jdkFile; - } - setupJava() { - return __awaiter(this, void 0, void 0, function* () { - let foundJava = this.findInToolcache(); - if (foundJava) { - core.info(`Resolved Java ${foundJava.version} from tool-cache`); - } - else { - core.info(`Java ${this.version} was not found in tool-cache. 
Trying to unpack JDK file...`); - if (!this.jdkFile) { - throw new Error("'jdkFile' is not specified"); - } - const jdkFilePath = path_1.default.resolve(this.jdkFile); - const stats = fs_1.default.statSync(jdkFilePath); - if (!stats.isFile()) { - throw new Error(`JDK file was not found in path '${jdkFilePath}'`); - } - core.info(`Extracting Java from '${jdkFilePath}'`); - const extractedJavaPath = yield util_1.extractJdkFile(jdkFilePath); - const archiveName = fs_1.default.readdirSync(extractedJavaPath)[0]; - const archivePath = path_1.default.join(extractedJavaPath, archiveName); - const javaVersion = this.version; - let javaPath = yield tc.cacheDir(archivePath, this.toolcacheFolderName, this.getToolcacheVersionName(javaVersion), this.architecture); - // for different Java distributions, postfix can exist or not so need to check both cases - if (process.platform === 'darwin' && - fs_1.default.existsSync(path_1.default.join(javaPath, constants_1.MACOS_JAVA_CONTENT_POSTFIX))) { - javaPath = path_1.default.join(javaPath, constants_1.MACOS_JAVA_CONTENT_POSTFIX); - } - foundJava = { - version: javaVersion, - path: javaPath - }; - } - core.info(`Setting Java ${foundJava.version} as default`); - this.setJavaDefault(foundJava.version, foundJava.path); - return foundJava; - }); - } - findPackageForDownload(version) { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('This method should not be implemented in local file provider'); - }); - } - downloadTool(javaRelease) { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('This method should not be implemented in local file provider'); - }); - } -} -exports.LocalDistribution = LocalDistribution; +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? 
this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || from); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); +}); /***/ }), @@ -13944,10 +14529,9 @@ module.exports = require("punycode"); * limitations under the License. */ Object.defineProperty(exports, "__esModule", { value: true }); -exports.NoopTracer = void 0; -var __1 = __webpack_require__(440); -var context_utils_1 = __webpack_require__(969); -var NonRecordingSpan_1 = __webpack_require__(437); +exports.NOOP_TRACER = exports.NoopTracer = void 0; +var context_1 = __webpack_require__(132); +var NoopSpan_1 = __webpack_require__(767); var spancontext_utils_1 = __webpack_require__(453); /** * No-op implementations of {@link Tracer}. @@ -13959,40 +14543,16 @@ var NoopTracer = /** @class */ (function () { NoopTracer.prototype.startSpan = function (name, options, context) { var root = Boolean(options === null || options === void 0 ? 
void 0 : options.root); if (root) { - return new NonRecordingSpan_1.NonRecordingSpan(); + return new NoopSpan_1.NoopSpan(); } - var parentFromContext = context && context_utils_1.getSpanContext(context); + var parentFromContext = context && context_1.getSpanContext(context); if (isSpanContext(parentFromContext) && spancontext_utils_1.isSpanContextValid(parentFromContext)) { - return new NonRecordingSpan_1.NonRecordingSpan(parentFromContext); - } - else { - return new NonRecordingSpan_1.NonRecordingSpan(); - } - }; - NoopTracer.prototype.startActiveSpan = function (name, arg2, arg3, arg4) { - var opts; - var ctx; - var fn; - if (arguments.length < 2) { - return; - } - else if (arguments.length === 2) { - fn = arg2; - } - else if (arguments.length === 3) { - opts = arg2; - fn = arg3; + return new NoopSpan_1.NoopSpan(parentFromContext); } else { - opts = arg2; - ctx = arg3; - fn = arg4; + return new NoopSpan_1.NoopSpan(); } - var parentContext = ctx !== null && ctx !== void 0 ? ctx : __1.context.active(); - var span = this.startSpan(name, opts, parentContext); - var contextWithSpanSet = context_utils_1.setSpan(parentContext, span); - return __1.context.with(contextWithSpanSet, fn, undefined, span); }; return NoopTracer; }()); @@ -14003,6 +14563,7 @@ function isSpanContext(spanContext) { typeof spanContext['traceId'] === 'string' && typeof spanContext['traceFlags'] === 'number'); } +exports.NOOP_TRACER = new NoopTracer(); //# sourceMappingURL=NoopTracer.js.map /***/ }), @@ -14070,7 +14631,7 @@ Object.defineProperty(exports, "__esModule", { value: true }); * limitations under the License. */ Object.defineProperty(exports, "__esModule", { value: true }); -exports.NoopTracerProvider = void 0; +exports.NOOP_TRACER_PROVIDER = exports.NoopTracerProvider = void 0; var NoopTracer_1 = __webpack_require__(216); /** * An implementation of the {@link TracerProvider} which returns an impotent @@ -14082,11 +14643,12 @@ var NoopTracerProvider = /** @class */ (function () { function NoopTracerProvider() { } NoopTracerProvider.prototype.getTracer = function (_name, _version) { - return new NoopTracer_1.NoopTracer(); + return NoopTracer_1.NOOP_TRACER; }; return NoopTracerProvider; }()); exports.NoopTracerProvider = NoopTracerProvider; +exports.NOOP_TRACER_PROVIDER = new NoopTracerProvider(); //# sourceMappingURL=NoopTracerProvider.js.map /***/ }), @@ -15944,7 +16506,7 @@ var __createBinding; ar[i] = from[i]; } } - return to.concat(ar || Array.prototype.slice.call(from)); + return to.concat(ar || from); }; __await = function (v) { @@ -18414,8 +18976,7 @@ function restore(id) { core.debug(`primary key is ${primaryKey}`); core.saveState(STATE_CACHE_PRIMARY_KEY, primaryKey); if (primaryKey.endsWith('-')) { - core.warning(`No file in ${process.cwd()} matched to [${packageManager.pattern}], make sure you have checked out the target repository`); - return; + throw new Error(`No file in ${process.cwd()} matched to [${packageManager.pattern}], make sure you have checked out the target repository`); } const matchedKey = yield cache.restoreCache(packageManager.path, primaryKey, [ `${CACHE_KEY_PREFIX}-${process.env['RUNNER_OS']}-${id}` @@ -18844,6 +19405,7 @@ Object.defineProperty(exports, '__esModule', { value: true }); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } +var tslib = __webpack_require__(909); var uuid = __webpack_require__(585); var tough = __webpack_require__(929); var http = __webpack_require__(605); @@ -18856,7 +19418,6 @@ var url = __webpack_require__(835); var stream = __webpack_require__(794); var logger$1 = __webpack_require__(492); var tunnel = __webpack_require__(856); -var tslib = __webpack_require__(909); var coreAuth = __webpack_require__(229); var xml2js = __webpack_require__(992); var os = __webpack_require__(87); @@ -18873,7 +19434,7 @@ function getHeaderKey(headerName) { } function isHttpHeadersLike(object) { if (object && typeof object === "object") { - const castObject = object; + var castObject = object; if (typeof castObject.rawHeaders === "function" && typeof castObject.clone === "function" && typeof castObject.get === "function" && @@ -18892,11 +19453,11 @@ function isHttpHeadersLike(object) { /** * A collection of HTTP header key/value pairs. */ -class HttpHeaders { - constructor(rawHeaders) { +var HttpHeaders = /** @class */ (function () { + function HttpHeaders(rawHeaders) { this._headersMap = {}; if (rawHeaders) { - for (const headerName in rawHeaders) { + for (var headerName in rawHeaders) { this.set(headerName, rawHeaders[headerName]); } } @@ -18907,99 +19468,100 @@ class HttpHeaders { * @param headerName - The name of the header to set. This value is case-insensitive. * @param headerValue - The value of the header to set. */ - set(headerName, headerValue) { + HttpHeaders.prototype.set = function (headerName, headerValue) { this._headersMap[getHeaderKey(headerName)] = { name: headerName, value: headerValue.toString() }; - } + }; /** * Get the header value for the provided header name, or undefined if no header exists in this * collection with the provided name. * @param headerName - The name of the header. */ - get(headerName) { - const header = this._headersMap[getHeaderKey(headerName)]; + HttpHeaders.prototype.get = function (headerName) { + var header = this._headersMap[getHeaderKey(headerName)]; return !header ? undefined : header.value; - } + }; /** * Get whether or not this header collection contains a header entry for the provided header name. */ - contains(headerName) { + HttpHeaders.prototype.contains = function (headerName) { return !!this._headersMap[getHeaderKey(headerName)]; - } + }; /** * Remove the header with the provided headerName. Return whether or not the header existed and * was removed. * @param headerName - The name of the header to remove. */ - remove(headerName) { - const result = this.contains(headerName); + HttpHeaders.prototype.remove = function (headerName) { + var result = this.contains(headerName); delete this._headersMap[getHeaderKey(headerName)]; return result; - } + }; /** * Get the headers that are contained this collection as an object. */ - rawHeaders() { - const result = {}; - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; + HttpHeaders.prototype.rawHeaders = function () { + var result = {}; + for (var headerKey in this._headersMap) { + var header = this._headersMap[headerKey]; result[header.name.toLowerCase()] = header.value; } return result; - } + }; /** * Get the headers that are contained in this collection as an array. 
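The HttpHeaders class above stores entries under a normalised key (the set docstring notes that names are case-insensitive, and rawHeaders lower-cases them), while clone() copies through rawHeaders(). A small illustrative sketch with made-up values:

const headers = new HttpHeaders({ "Content-Type": "application/json" });
headers.set("x-ms-client-request-id", "example-request-id");       // placeholder value
console.log(headers.get("content-type"));                          // "application/json", despite the different casing
const copy = headers.clone();                                      // independent copy via rawHeaders()
copy.remove("Content-Type");
console.log(headers.contains("CONTENT-TYPE"), copy.contains("content-type")); // true false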
*/ - headersArray() { - const headers = []; - for (const headerKey in this._headersMap) { + HttpHeaders.prototype.headersArray = function () { + var headers = []; + for (var headerKey in this._headersMap) { headers.push(this._headersMap[headerKey]); } return headers; - } + }; /** * Get the header names that are contained in this collection. */ - headerNames() { - const headerNames = []; - const headers = this.headersArray(); - for (let i = 0; i < headers.length; ++i) { + HttpHeaders.prototype.headerNames = function () { + var headerNames = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { headerNames.push(headers[i].name); } return headerNames; - } + }; /** * Get the header values that are contained in this collection. */ - headerValues() { - const headerValues = []; - const headers = this.headersArray(); - for (let i = 0; i < headers.length; ++i) { + HttpHeaders.prototype.headerValues = function () { + var headerValues = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { headerValues.push(headers[i].value); } return headerValues; - } + }; /** * Get the JSON object representation of this HTTP header collection. */ - toJson() { + HttpHeaders.prototype.toJson = function () { return this.rawHeaders(); - } + }; /** * Get the string representation of this HTTP header collection. */ - toString() { + HttpHeaders.prototype.toString = function () { return JSON.stringify(this.toJson()); - } + }; /** * Create a deep clone/copy of this HttpHeaders collection. */ - clone() { + HttpHeaders.prototype.clone = function () { return new HttpHeaders(this.rawHeaders()); - } -} + }; + return HttpHeaders; +}()); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -19017,7 +19579,7 @@ function encodeString(value) { function encodeByteArray(value) { // Buffer.from accepts | -- the TypeScript definition is off here // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length - const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + var bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); return bufferValue.toString("base64"); } /** @@ -19030,11 +19592,11 @@ function decodeString(value) { // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -const Constants = { +var Constants = { /** * The core-http version */ - coreHttpVersion: "2.1.0", + coreHttpVersion: "1.2.6", /** * Specifies HTTP. */ @@ -19073,8 +19635,7 @@ const Constants = { PATCH: "PATCH" }, StatusCodes: { - TooManyRequests: 429, - ServiceUnavailable: 503 + TooManyRequests: 429 } }, /** @@ -19104,18 +19665,18 @@ const Constants = { /** * Default key used to access the XML attributes. */ -const XML_ATTRKEY = "$"; +var XML_ATTRKEY = "$"; /** * Default key used to access the XML value content. */ -const XML_CHARKEY = "_"; +var XML_CHARKEY = "_"; // Copyright (c) Microsoft Corporation. -const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +var validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; /** * A constant that indicates whether the environment is node.js or browser based. */ -const isNode = typeof process !== "undefined" && +var isNode = typeof process !== "undefined" && !!process.version && !!process.versions && !!process.versions.node; @@ -19141,7 +19702,7 @@ function encodeUri(uri) { * @returns The stripped version of Http Response. 
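encodeByteArray, shown earlier in this hunk, is the base64 helper that serializeByteArrayType relies on further down; it accepts either a Buffer or a Uint8Array and returns a base64 string. A quick sketch:

const bytes = new Uint8Array([104, 101, 108, 108, 111]);   // the bytes of "hello"
console.log(encodeByteArray(bytes));                       // "aGVsbG8="
console.log(encodeByteArray(Buffer.from("hello")));        // same output; Buffers are used as-is, without a copy
// decodeString (declared above, body outside this hunk) reverses the encoding back into raw bytes.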
*/ function stripResponse(response) { - const strippedResponse = {}; + var strippedResponse = {}; strippedResponse.body = response.bodyAsText; strippedResponse.headers = response.headers; strippedResponse.status = response.status; @@ -19155,7 +19716,7 @@ function stripResponse(response) { * @returns The stripped version of Http Request. */ function stripRequest(request) { - const strippedRequest = request.clone(); + var strippedRequest = request.clone(); if (strippedRequest.headers) { strippedRequest.headers.remove("authorization"); } @@ -19188,12 +19749,21 @@ function generateUuid() { * @returns A chain of resolved or rejected promises */ function executePromisesSequentially(promiseFactories, kickstart) { - let result = Promise.resolve(kickstart); - promiseFactories.forEach((promiseFactory) => { + var result = Promise.resolve(kickstart); + promiseFactories.forEach(function (promiseFactory) { result = result.then(promiseFactory); }); return result; } +/** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param t - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @returns Resolved promise + */ +function delay(t, value) { + return new Promise(function (resolve) { return setTimeout(function () { return resolve(value); }, t); }); +} /** * Converts a Promise to a callback. * @param promise - The Promise to be converted to a callback @@ -19206,13 +19776,13 @@ function promiseToCallback(promise) { throw new Error("The provided input is not a Promise."); } // eslint-disable-next-line @typescript-eslint/ban-types - return (cb) => { + return function (cb) { promise - .then((data) => { + .then(function (data) { // eslint-disable-next-line promise/no-callback-in-promise return cb(undefined, data); }) - .catch((err) => { + .catch(function (err) { // eslint-disable-next-line promise/no-callback-in-promise cb(err); }); @@ -19227,25 +19797,26 @@ function promiseToServiceCallback(promise) { if (typeof promise.then !== "function") { throw new Error("The provided input is not a Promise."); } - return (cb) => { + return function (cb) { promise - .then((data) => { + .then(function (data) { return process.nextTick(cb, undefined, data.parsedBody, data.request, data); }) - .catch((err) => { + .catch(function (err) { process.nextTick(cb, err); }); }; } function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + var _a, _b, _c; if (!Array.isArray(obj)) { obj = [obj]; } if (!xmlNamespaceKey || !xmlNamespace) { - return { [elementName]: obj }; + return _a = {}, _a[elementName] = obj, _a; } - const result = { [elementName]: obj }; - result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + var result = (_b = {}, _b[elementName] = obj, _b); + result[XML_ATTRKEY] = (_c = {}, _c[xmlNamespaceKey] = xmlNamespace, _c); return result; } /** @@ -19254,14 +19825,14 @@ function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { * @param sourceCtors - An array of source objects from which the properties need to be taken. 
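executePromisesSequentially and the delay helper above compose naturally: each factory receives the value resolved by the previous one, starting from the kickstart argument. An illustrative sketch:

executePromisesSequentially(
  [
    (n) => delay(10, n + 1),        // resolves with 1 after 10 ms
    (n) => Promise.resolve(n + 1),  // 2
    (n) => delay(10, n + 1)         // 3
  ],
  0                                 // kickstart value handed to the first factory
).then((total) => console.log(total)); // logs 3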
*/ function applyMixins(targetCtorParam, sourceCtors) { - const castTargetCtorParam = targetCtorParam; - sourceCtors.forEach((sourceCtor) => { - Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => { + var castTargetCtorParam = targetCtorParam; + sourceCtors.forEach(function (sourceCtor) { + Object.getOwnPropertyNames(sourceCtor.prototype).forEach(function (name) { castTargetCtorParam.prototype[name] = sourceCtor.prototype[name]; }); }); } -const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +var validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; /** * Indicates whether the given string is in ISO 8601 format. * @param value - The value to be validated for ISO 8601 duration format. @@ -19311,18 +19882,19 @@ function isObject(input) { } // Copyright (c) Microsoft Corporation. -class Serializer { - constructor(modelMappers = {}, isXML) { +var Serializer = /** @class */ (function () { + function Serializer(modelMappers, isXML) { + if (modelMappers === void 0) { modelMappers = {}; } this.modelMappers = modelMappers; this.isXML = isXML; } - validateConstraints(mapper, value, objectName) { - const failValidation = (constraintName, constraintValue) => { - throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + Serializer.prototype.validateConstraints = function (mapper, value, objectName) { + var failValidation = function (constraintName, constraintValue) { + throw new Error("\"" + objectName + "\" with value \"" + value + "\" should satisfy the constraint \"" + constraintName + "\": " + constraintValue + "."); }; if (mapper.constraints && value != undefined) { - const valueAsNumber = value; - const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems } = mapper.constraints; + var valueAsNumber = value; + var _a = mapper.constraints, ExclusiveMaximum = _a.ExclusiveMaximum, ExclusiveMinimum = _a.ExclusiveMinimum, InclusiveMaximum = _a.InclusiveMaximum, InclusiveMinimum = _a.InclusiveMinimum, MaxItems = _a.MaxItems, MaxLength = _a.MaxLength, MinItems = _a.MinItems, MinLength = _a.MinLength, MultipleOf = _a.MultipleOf, Pattern = _a.Pattern, UniqueItems = _a.UniqueItems; if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { failValidation("ExclusiveMaximum", ExclusiveMaximum); } @@ -19335,7 +19907,7 @@ class Serializer { if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) { failValidation("InclusiveMinimum", InclusiveMinimum); } - const valueAsArray = value; + var valueAsArray = value; if (MaxItems != undefined && valueAsArray.length > MaxItems) { failValidation("MaxItems", MaxItems); } @@ -19352,17 +19924,17 @@ class Serializer { failValidation("MultipleOf", MultipleOf); } if (Pattern) { - const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + var pattern = typeof Pattern === "string" ? 
new RegExp(Pattern) : Pattern; if (typeof value !== "string" || value.match(pattern) === null) { failValidation("Pattern", Pattern); } } if (UniqueItems && - valueAsArray.some((item, i, ar) => ar.indexOf(item) !== i)) { + valueAsArray.some(function (item, i, ar) { return ar.indexOf(item) !== i; })) { failValidation("UniqueItems", UniqueItems); } } - } + }; /** * Serialize the given object based on its metadata defined in the mapper * @@ -19372,15 +19944,16 @@ class Serializer { * @param options - additional options to deserialization * @returns A valid serialized Javascript object */ - serialize(mapper, object, objectName, options = {}) { + Serializer.prototype.serialize = function (mapper, object, objectName, options) { var _a, _b, _c; - const updatedOptions = { + if (options === void 0) { options = {}; } + var updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY }; - let payload = {}; - const mapperType = mapper.type.name; + var payload = {}; + var mapperType = mapper.type.name; if (!objectName) { objectName = mapper.serializedName; } @@ -19399,15 +19972,15 @@ class Serializer { // true || null | undefined/null // false || X | undefined // undefined || X | undefined/null - const { required, nullable } = mapper; + var required = mapper.required, nullable = mapper.nullable; if (required && nullable && object === undefined) { - throw new Error(`${objectName} cannot be undefined.`); + throw new Error(objectName + " cannot be undefined."); } if (required && !nullable && object == undefined) { - throw new Error(`${objectName} cannot be null or undefined.`); + throw new Error(objectName + " cannot be null or undefined."); } if (!required && nullable === false && object === null) { - throw new Error(`${objectName} cannot be null.`); + throw new Error(objectName + " cannot be null."); } if (object == undefined) { payload = object; @@ -19422,7 +19995,7 @@ class Serializer { payload = serializeBasicTypes(mapperType, objectName, object); } else if (mapperType.match(/^Enum$/i) !== null) { - const enumMapper = mapper; + var enumMapper = mapper; payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); } else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { @@ -19445,7 +20018,7 @@ class Serializer { } } return payload; - } + }; /** * Deserialize the given object based on its metadata defined in the mapper * @@ -19455,9 +20028,10 @@ class Serializer { * @param options - Controls behavior of XML parser and builder. * @returns A valid deserialized Javascript object */ - deserialize(mapper, responseBody, objectName, options = {}) { + Serializer.prototype.deserialize = function (mapper, responseBody, objectName, options) { var _a, _b, _c; - const updatedOptions = { + if (options === void 0) { options = {}; } + var updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? 
_c : XML_CHARKEY @@ -19475,8 +20049,8 @@ class Serializer { } return responseBody; } - let payload; - const mapperType = mapper.type.name; + var payload; + var mapperType = mapper.type.name; if (!objectName) { objectName = mapper.serializedName; } @@ -19485,8 +20059,8 @@ class Serializer { } else { if (this.isXML) { - const xmlCharKey = updatedOptions.xmlCharKey; - const castResponseBody = responseBody; + var xmlCharKey = updatedOptions.xmlCharKey; + var castResponseBody = responseBody; /** * If the mapper specifies this as a non-composite type value but the responseBody contains * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, @@ -19540,10 +20114,11 @@ class Serializer { payload = mapper.defaultValue; } return payload; - } -} + }; + return Serializer; +}()); function trimEnd(str, ch) { - let len = str.length; + var len = str.length; while (len - 1 >= 0 && str[len - 1] === ch) { --len; } @@ -19554,10 +20129,10 @@ function bufferToBase64Url(buffer) { return undefined; } if (!(buffer instanceof Uint8Array)) { - throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + throw new Error("Please provide an input of type Uint8Array for converting to Base64Url."); } // Uint8Array to Base64. - const str = encodeByteArray(buffer); + var str = encodeByteArray(buffer); // Base64 to Base64Url. return trimEnd(str, "=") .replace(/\+/g, "-") @@ -19576,11 +20151,12 @@ function base64UrlToByteArray(str) { return decodeString(str); } function splitSerializeName(prop) { - const classes = []; - let partialclass = ""; + var classes = []; + var partialclass = ""; if (prop) { - const subwords = prop.split("."); - for (const item of subwords) { + var subwords = prop.split("."); + for (var _i = 0, subwords_1 = subwords; _i < subwords_1.length; _i++) { + var item = subwords_1[_i]; if (item.charAt(item.length - 1) === "\\") { partialclass += item.substr(0, item.length - 1) + "."; } @@ -19612,32 +20188,32 @@ function serializeBasicTypes(typeName, objectName, value) { if (value !== null && value !== undefined) { if (typeName.match(/^Number$/i) !== null) { if (typeof value !== "number") { - throw new Error(`${objectName} with value ${value} must be of type number.`); + throw new Error(objectName + " with value " + value + " must be of type number."); } } else if (typeName.match(/^String$/i) !== null) { if (typeof value.valueOf() !== "string") { - throw new Error(`${objectName} with value "${value}" must be of type string.`); + throw new Error(objectName + " with value \"" + value + "\" must be of type string."); } } else if (typeName.match(/^Uuid$/i) !== null) { if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { - throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + throw new Error(objectName + " with value \"" + value + "\" must be of type string and a valid uuid."); } } else if (typeName.match(/^Boolean$/i) !== null) { if (typeof value !== "boolean") { - throw new Error(`${objectName} with value ${value} must be of type boolean.`); + throw new Error(objectName + " with value " + value + " must be of type boolean."); } } else if (typeName.match(/^Stream$/i) !== null) { - const objectType = typeof value; + var objectType = typeof value; if (objectType !== "string" && objectType !== "function" && !(value instanceof ArrayBuffer) && !ArrayBuffer.isView(value) && !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob)) { - throw new Error(`${objectName} must be a 
string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`); + throw new Error(objectName + " must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream."); } } } @@ -19645,34 +20221,34 @@ function serializeBasicTypes(typeName, objectName, value) { } function serializeEnumType(objectName, allowedValues, value) { if (!allowedValues) { - throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + throw new Error("Please provide a set of allowedValues to validate " + objectName + " as an Enum Type."); } - const isPresent = allowedValues.some((item) => { + var isPresent = allowedValues.some(function (item) { if (typeof item.valueOf() === "string") { return item.toLowerCase() === value.toLowerCase(); } return item === value; }); if (!isPresent) { - throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); + throw new Error(value + " is not a valid value for " + objectName + ". The valid values are: " + JSON.stringify(allowedValues) + "."); } return value; } function serializeByteArrayType(objectName, value) { - let returnValue = ""; + var returnValue = ""; if (value != undefined) { if (!(value instanceof Uint8Array)) { - throw new Error(`${objectName} must be of type Uint8Array.`); + throw new Error(objectName + " must be of type Uint8Array."); } returnValue = encodeByteArray(value); } return returnValue; } function serializeBase64UrlType(objectName, value) { - let returnValue = ""; + var returnValue = ""; if (value != undefined) { if (!(value instanceof Uint8Array)) { - throw new Error(`${objectName} must be of type Uint8Array.`); + throw new Error(objectName + " must be of type Uint8Array."); } returnValue = bufferToBase64Url(value) || ""; } @@ -19683,7 +20259,7 @@ function serializeDateTypes(typeName, value, objectName) { if (typeName.match(/^Date$/i) !== null) { if (!(value instanceof Date || (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); } value = value instanceof Date @@ -19693,57 +20269,58 @@ function serializeDateTypes(typeName, value, objectName) { else if (typeName.match(/^DateTime$/i) !== null) { if (!(value instanceof Date || (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); } value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); } else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { if (!(value instanceof Date || (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123 format."); } value = value instanceof Date ? 
value.toUTCString() : new Date(value).toUTCString(); } else if (typeName.match(/^UnixTime$/i) !== null) { if (!(value instanceof Date || (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + - `for it to be serialized in UnixTime/Epoch format.`); + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123/ISO8601 format " + + "for it to be serialized in UnixTime/Epoch format."); } value = dateToUnixTime(value); } else if (typeName.match(/^TimeSpan$/i) !== null) { if (!isDuration(value)) { - throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + throw new Error(objectName + " must be a string in ISO 8601 format. Instead was \"" + value + "\"."); } } } return value; } function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + var _a, _b; if (!Array.isArray(object)) { - throw new Error(`${objectName} must be of type Array.`); + throw new Error(objectName + " must be of type Array."); } - const elementType = mapper.type.element; + var elementType = mapper.type.element; if (!elementType || typeof elementType !== "object") { - throw new Error(`element" metadata for an Array must be defined in the ` + - `mapper and it must of type "object" in ${objectName}.`); + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); } - const tempArray = []; - for (let i = 0; i < object.length; i++) { - const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + var tempArray = []; + for (var i = 0; i < object.length; i++) { + var serializedValue = serializer.serialize(elementType, object[i], objectName, options); if (isXml && elementType.xmlNamespace) { - const xmlnsKey = elementType.xmlNamespacePrefix - ? `xmlns:${elementType.xmlNamespacePrefix}` + var xmlnsKey = elementType.xmlNamespacePrefix + ? 
"xmlns:" + elementType.xmlNamespacePrefix : "xmlns"; if (elementType.type.name === "Composite") { - tempArray[i] = Object.assign({}, serializedValue); - tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + tempArray[i] = tslib.__assign({}, serializedValue); + tempArray[i][XML_ATTRKEY] = (_a = {}, _a[xmlnsKey] = elementType.xmlNamespace, _a); } else { tempArray[i] = {}; tempArray[i][options.xmlCharKey] = serializedValue; - tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + tempArray[i][XML_ATTRKEY] = (_b = {}, _b[xmlnsKey] = elementType.xmlNamespace, _b); } } else { @@ -19753,25 +20330,27 @@ function serializeSequenceType(serializer, mapper, object, objectName, isXml, op return tempArray; } function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + var _a; if (typeof object !== "object") { - throw new Error(`${objectName} must be of type object.`); + throw new Error(objectName + " must be of type object."); } - const valueType = mapper.type.value; + var valueType = mapper.type.value; if (!valueType || typeof valueType !== "object") { - throw new Error(`"value" metadata for a Dictionary must be defined in the ` + - `mapper and it must of type "object" in ${objectName}.`); + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); } - const tempDictionary = {}; - for (const key of Object.keys(object)) { - const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + var tempDictionary = {}; + for (var _i = 0, _b = Object.keys(object); _i < _b.length; _i++) { + var key = _b[_i]; + var serializedValue = serializer.serialize(valueType, object[key], objectName, options); // If the element needs an XML namespace we need to add it within the $ property tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); } // Add the namespace to the root element if needed if (isXml && mapper.xmlNamespace) { - const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; - const result = tempDictionary; - result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + var xmlnsKey = mapper.xmlNamespacePrefix ? "xmlns:" + mapper.xmlNamespacePrefix : "xmlns"; + var result = tempDictionary; + result[XML_ATTRKEY] = (_a = {}, _a[xmlnsKey] = mapper.xmlNamespace, _a); return result; } return tempDictionary; @@ -19783,9 +20362,9 @@ function serializeDictionaryType(serializer, mapper, object, objectName, isXml, * @param objectName - Name of the object being serialized */ function resolveAdditionalProperties(serializer, mapper, objectName) { - const additionalProperties = mapper.type.additionalProperties; + var additionalProperties = mapper.type.additionalProperties; if (!additionalProperties && mapper.type.className) { - const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + var modelMapper = resolveReferencedMapper(serializer, mapper, objectName); return modelMapper === null || modelMapper === void 0 ? 
void 0 : modelMapper.type.additionalProperties; } return additionalProperties; @@ -19797,9 +20376,9 @@ function resolveAdditionalProperties(serializer, mapper, objectName) { * @param objectName - Name of the object being serialized */ function resolveReferencedMapper(serializer, mapper, objectName) { - const className = mapper.type.className; + var className = mapper.type.className; if (!className) { - throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); + throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); } return serializer.modelMappers[className]; } @@ -19809,34 +20388,36 @@ function resolveReferencedMapper(serializer, mapper, objectName) { * @param mapper - The composite mapper to resolve */ function resolveModelProperties(serializer, mapper, objectName) { - let modelProps = mapper.type.modelProperties; + var modelProps = mapper.type.modelProperties; if (!modelProps) { - const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + var modelMapper = resolveReferencedMapper(serializer, mapper, objectName); if (!modelMapper) { - throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); + throw new Error("mapper() cannot be null or undefined for model \"" + mapper.type.className + "\"."); } modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; if (!modelProps) { - throw new Error(`modelProperties cannot be null or undefined in the ` + - `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + throw new Error("modelProperties cannot be null or undefined in the " + + ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + mapper.type.className + "\" for object \"" + objectName + "\".")); } } return modelProps; } function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + var _a, _b; if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); } if (object != undefined) { - const payload = {}; - const modelProps = resolveModelProperties(serializer, mapper, objectName); - for (const key of Object.keys(modelProps)) { - const propertyMapper = modelProps[key]; + var payload = {}; + var modelProps = resolveModelProperties(serializer, mapper, objectName); + for (var _i = 0, _c = Object.keys(modelProps); _i < _c.length; _i++) { + var key = _c[_i]; + var propertyMapper = modelProps[key]; if (propertyMapper.readOnly) { continue; } - let propName; - let parentObject = payload; + var propName = void 0; + var parentObject = payload; if (serializer.isXML) { if (propertyMapper.xmlIsWrapped) { propName = propertyMapper.xmlName; @@ -19846,10 +20427,11 @@ function serializeCompositeType(serializer, mapper, object, objectName, isXml, o } } else { - const paths = splitSerializeName(propertyMapper.serializedName); + var paths = splitSerializeName(propertyMapper.serializedName); propName = paths.pop(); - for (const pathName of paths) { - const childObject = parentObject[pathName]; + for (var _d = 0, paths_1 = paths; _d < paths_1.length; _d++) { + var pathName = paths_1[_d]; + var childObject = parentObject[pathName]; if (childObject == undefined && (object[key] != undefined || propertyMapper.defaultValue !== undefined)) { parentObject[pathName] = {}; @@ -19859,24 +20441,24 @@ 
function serializeCompositeType(serializer, mapper, object, objectName, isXml, o } if (parentObject != undefined) { if (isXml && mapper.xmlNamespace) { - const xmlnsKey = mapper.xmlNamespacePrefix - ? `xmlns:${mapper.xmlNamespacePrefix}` + var xmlnsKey = mapper.xmlNamespacePrefix + ? "xmlns:" + mapper.xmlNamespacePrefix : "xmlns"; - parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); + parentObject[XML_ATTRKEY] = tslib.__assign(tslib.__assign({}, parentObject[XML_ATTRKEY]), (_a = {}, _a[xmlnsKey] = mapper.xmlNamespace, _a)); } - const propertyObjectName = propertyMapper.serializedName !== "" + var propertyObjectName = propertyMapper.serializedName !== "" ? objectName + "." + propertyMapper.serializedName : objectName; - let toSerialize = object[key]; - const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + var toSerialize = object[key]; + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); if (polymorphicDiscriminator && polymorphicDiscriminator.clientName === key && toSerialize == undefined) { toSerialize = mapper.serializedName; } - const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); if (serializedValue !== undefined && propName != undefined) { - const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + var value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); if (isXml && propertyMapper.xmlIsAttribute) { // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. // This keeps things simple while preventing name collision @@ -19885,7 +20467,7 @@ function serializeCompositeType(serializer, mapper, object, objectName, isXml, o parentObject[XML_ATTRKEY][propName] = serializedValue; } else if (isXml && propertyMapper.xmlIsWrapped) { - parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + parentObject[propName] = (_b = {}, _b[propertyMapper.xmlElementName] = value, _b); } else { parentObject[propName] = value; @@ -19893,14 +20475,17 @@ function serializeCompositeType(serializer, mapper, object, objectName, isXml, o } } } - const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + var additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); if (additionalPropertiesMapper) { - const propNames = Object.keys(modelProps); - for (const clientPropName in object) { - const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + var propNames = Object.keys(modelProps); + var _loop_1 = function (clientPropName) { + var isAdditionalProperty = propNames.every(function (pn) { return pn !== clientPropName; }); if (isAdditionalProperty) { payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); } + }; + for (var clientPropName in object) { + _loop_1(clientPropName); } } return payload; @@ -19908,24 +20493,25 @@ function serializeCompositeType(serializer, mapper, object, objectName, isXml, o return object; } function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + var _a; if (!isXml || !propertyMapper.xmlNamespace) { return serializedValue; } - const xmlnsKey = propertyMapper.xmlNamespacePrefix - ? 
`xmlns:${propertyMapper.xmlNamespacePrefix}` + var xmlnsKey = propertyMapper.xmlNamespacePrefix + ? "xmlns:" + propertyMapper.xmlNamespacePrefix : "xmlns"; - const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + var xmlNamespace = (_a = {}, _a[xmlnsKey] = propertyMapper.xmlNamespace, _a); if (["Composite"].includes(propertyMapper.type.name)) { if (serializedValue[XML_ATTRKEY]) { return serializedValue; } else { - const result = Object.assign({}, serializedValue); - result[XML_ATTRKEY] = xmlNamespace; - return result; + var result_1 = tslib.__assign({}, serializedValue); + result_1[XML_ATTRKEY] = xmlNamespace; + return result_1; } } - const result = {}; + var result = {}; result[options.xmlCharKey] = serializedValue; result[XML_ATTRKEY] = xmlNamespace; return result; @@ -19938,22 +20524,24 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName, if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); } - const modelProps = resolveModelProperties(serializer, mapper, objectName); - let instance = {}; - const handledPropertyNames = []; - for (const key of Object.keys(modelProps)) { - const propertyMapper = modelProps[key]; - const paths = splitSerializeName(modelProps[key].serializedName); + var modelProps = resolveModelProperties(serializer, mapper, objectName); + var instance = {}; + var handledPropertyNames = []; + for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { + var key = _b[_i]; + var propertyMapper = modelProps[key]; + var paths = splitSerializeName(modelProps[key].serializedName); handledPropertyNames.push(paths[0]); - const { serializedName, xmlName, xmlElementName } = propertyMapper; - let propertyObjectName = objectName; + var serializedName = propertyMapper.serializedName, xmlName = propertyMapper.xmlName, xmlElementName = propertyMapper.xmlElementName; + var propertyObjectName = objectName; if (serializedName !== "" && serializedName !== undefined) { propertyObjectName = objectName + "." + serializedName; } - const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + var headerCollectionPrefix = propertyMapper.headerCollectionPrefix; if (headerCollectionPrefix) { - const dictionary = {}; - for (const headerKey of Object.keys(responseBody)) { + var dictionary = {}; + for (var _c = 0, _d = Object.keys(responseBody); _c < _d.length; _c++) { + var headerKey = _d[_c]; if (headerKey.startsWith(headerCollectionPrefix)) { dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); } @@ -19966,7 +20554,7 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName, instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); } else { - const propertyName = xmlElementName || xmlName || serializedName; + var propertyName = xmlElementName || xmlName || serializedName; if (propertyMapper.xmlIsWrapped) { /* a list of wrapped by For the xml example below @@ -19982,28 +20570,29 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName, } xmlName is "Cors" and xmlElementName is"CorsRule". */ - const wrapped = responseBody[xmlName]; - const elementList = (_a = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _a !== void 0 ? 
_a : []; + var wrapped = responseBody[xmlName]; + var elementList = (_a = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _a !== void 0 ? _a : []; instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); } else { - const property = responseBody[propertyName]; + var property = responseBody[propertyName]; instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); } } } else { // deserialize the property if it is present in the provided responseBody instance - let propertyInstance; - let res = responseBody; + var propertyInstance = void 0; + var res = responseBody; // traversing the object step by step. - for (const item of paths) { + for (var _e = 0, paths_2 = paths; _e < paths_2.length; _e++) { + var item = paths_2[_e]; if (!res) break; res = res[item]; } propertyInstance = res; - const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + var polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; // checking that the model property name (key)(ex: "fishtype") and the // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") @@ -20018,14 +20607,15 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName, propertyInstance == undefined) { propertyInstance = mapper.serializedName; } - let serializedValue; + var serializedValue = void 0; // paging if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { propertyInstance = responseBody[key]; - const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + var arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); // Copy over any properties that have already been added into the instance, where they do // not exist on the newly de-serialized array - for (const [k, v] of Object.entries(instance)) { + for (var _f = 0, _g = Object.entries(instance); _f < _g.length; _f++) { + var _h = _g[_f], k = _h[0], v = _h[1]; if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { arrayInstance[k] = v; } @@ -20038,25 +20628,26 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName, } } } - const additionalPropertiesMapper = mapper.type.additionalProperties; + var additionalPropertiesMapper = mapper.type.additionalProperties; if (additionalPropertiesMapper) { - const isAdditionalProperty = (responsePropName) => { - for (const clientPropName in modelProps) { - const paths = splitSerializeName(modelProps[clientPropName].serializedName); + var isAdditionalProperty = function (responsePropName) { + for (var clientPropName in modelProps) { + var paths = splitSerializeName(modelProps[clientPropName].serializedName); if (paths[0] === responsePropName) { return false; } } return true; }; - for (const responsePropName in responseBody) { + for (var responsePropName in responseBody) { if (isAdditionalProperty(responsePropName)) { instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); } } } else if (responseBody) { - for (const key of Object.keys(responseBody)) { + for (var _j = 0, _k = Object.keys(responseBody); _j < _k.length; _j++) { + var key = _k[_j]; if (instance[key] === undefined && !handledPropertyNames.includes(key) && !isSpecialXmlProperty(key, 
options)) { @@ -20067,14 +20658,15 @@ function deserializeCompositeType(serializer, mapper, responseBody, objectName, return instance; } function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { - const value = mapper.type.value; + var value = mapper.type.value; if (!value || typeof value !== "object") { - throw new Error(`"value" metadata for a Dictionary must be defined in the ` + - `mapper and it must of type "object" in ${objectName}`); + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); } if (responseBody) { - const tempDictionary = {}; - for (const key of Object.keys(responseBody)) { + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(responseBody); _i < _a.length; _i++) { + var key = _a[_i]; tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); } return tempDictionary; @@ -20082,36 +20674,36 @@ function deserializeDictionaryType(serializer, mapper, responseBody, objectName, return responseBody; } function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { - const element = mapper.type.element; + var element = mapper.type.element; if (!element || typeof element !== "object") { - throw new Error(`element" metadata for an Array must be defined in the ` + - `mapper and it must of type "object" in ${objectName}`); + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); } if (responseBody) { if (!Array.isArray(responseBody)) { // xml2js will interpret a single element array as just the element, so force it to be an array responseBody = [responseBody]; } - const tempArray = []; - for (let i = 0; i < responseBody.length; i++) { - tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + var tempArray = []; + for (var i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], objectName + "[" + i + "]", options); } return tempArray; } return responseBody; } function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { - const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); if (polymorphicDiscriminator) { - const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + var discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; if (discriminatorName != undefined) { - const discriminatorValue = object[discriminatorName]; + var discriminatorValue = object[discriminatorName]; if (discriminatorValue != undefined) { - const typeName = mapper.type.uberParent || mapper.type.className; - const indexDiscriminator = discriminatorValue === typeName + var typeName = mapper.type.uberParent || mapper.type.className; + var indexDiscriminator = discriminatorValue === typeName ? discriminatorValue : typeName + "." + discriminatorValue; - const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + var polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; if (polymorphicMapper) { mapper = polymorphicMapper; } @@ -20132,7 +20724,7 @@ function getPolymorphicDiscriminatorSafely(serializer, typeName) { } // TODO: why is this here? 
function serializeObject(toSerialize) { - const castToSerialize = toSerialize; + var castToSerialize = toSerialize; if (toSerialize == undefined) return undefined; if (toSerialize instanceof Uint8Array) { @@ -20143,15 +20735,15 @@ function serializeObject(toSerialize) { return toSerialize.toISOString(); } else if (Array.isArray(toSerialize)) { - const array = []; - for (let i = 0; i < toSerialize.length; i++) { + var array = []; + for (var i = 0; i < toSerialize.length; i++) { array.push(serializeObject(toSerialize[i])); } return array; } else if (typeof toSerialize === "object") { - const dictionary = {}; - for (const property in toSerialize) { + var dictionary = {}; + for (var property in toSerialize) { dictionary[property] = serializeObject(castToSerialize[property]); } return dictionary; @@ -20162,14 +20754,15 @@ function serializeObject(toSerialize) { * Utility function to create a K:V from a list of strings */ function strEnum(o) { - const result = {}; - for (const key of o) { + var result = {}; + for (var _i = 0, o_1 = o; _i < o_1.length; _i++) { + var key = o_1[_i]; result[key] = key; } return result; } // eslint-disable-next-line @typescript-eslint/no-redeclare -const MapperType = strEnum([ +var MapperType = strEnum([ "Base64Url", "Boolean", "ByteArray", @@ -20191,7 +20784,7 @@ const MapperType = strEnum([ // Copyright (c) Microsoft Corporation. function isWebResourceLike(object) { if (object && typeof object === "object") { - const castObject = object; + var castObject = object; if (typeof castObject.url === "string" && typeof castObject.method === "string" && typeof castObject.headers === "object" && @@ -20210,8 +20803,8 @@ function isWebResourceLike(object) { * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary * properties to initiate a request. */ -class WebResource { - constructor(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) { +var WebResource = /** @class */ (function () { + function WebResource(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) { this.streamResponseBody = streamResponseBody; this.streamResponseStatusCodes = streamResponseStatusCodes; this.url = url || ""; @@ -20235,20 +20828,20 @@ class WebResource { * headers["accept-language"] are defined. It will throw an error if one of the above * mentioned properties are not defined. */ - validateRequestProperties() { + WebResource.prototype.validateRequestProperties = function () { if (!this.method) { throw new Error("WebResource.method is required."); } if (!this.url) { throw new Error("WebResource.url is required."); } - } + }; /** * Prepares the request. * @param options - Options to provide for preparing the request. * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. 
*/ - prepare(options) { + WebResource.prototype.prepare = function (options) { if (!options) { throw new Error("options object is required"); } @@ -20277,7 +20870,7 @@ class WebResource { } // set the method if (options.method) { - const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + var validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; if (validMethods.indexOf(options.method.toUpperCase()) === -1) { throw new Error('The provided method "' + options.method + @@ -20288,70 +20881,70 @@ class WebResource { this.method = options.method.toUpperCase(); // construct the url if path template is provided if (options.pathTemplate) { - const { pathTemplate, pathParameters } = options; - if (typeof pathTemplate !== "string") { + var pathTemplate_1 = options.pathTemplate, pathParameters_1 = options.pathParameters; + if (typeof pathTemplate_1 !== "string") { throw new Error('options.pathTemplate must be of type "string".'); } if (!options.baseUrl) { options.baseUrl = "https://management.azure.com"; } - const baseUrl = options.baseUrl; - let url = baseUrl + + var baseUrl = options.baseUrl; + var url_1 = baseUrl + (baseUrl.endsWith("/") ? "" : "/") + - (pathTemplate.startsWith("/") ? pathTemplate.slice(1) : pathTemplate); - const segments = url.match(/({[\w-]*\s*[\w-]*})/gi); + (pathTemplate_1.startsWith("/") ? pathTemplate_1.slice(1) : pathTemplate_1); + var segments = url_1.match(/({[\w-]*\s*[\w-]*})/gi); if (segments && segments.length) { - if (!pathParameters) { - throw new Error(`pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`); + if (!pathParameters_1) { + throw new Error("pathTemplate: " + pathTemplate_1 + " has been provided. Hence, options.pathParameters must also be provided."); } segments.forEach(function (item) { - const pathParamName = item.slice(1, -1); - const pathParam = pathParameters[pathParamName]; + var pathParamName = item.slice(1, -1); + var pathParam = pathParameters_1[pathParamName]; if (pathParam === null || pathParam === undefined || !(typeof pathParam === "string" || typeof pathParam === "object")) { - const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2); - throw new Error(`pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + - ` however, it is not present in parameters: ${stringifiedPathParameters}.` + - `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + - `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.`); + var stringifiedPathParameters = JSON.stringify(pathParameters_1, undefined, 2); + throw new Error("pathTemplate: " + pathTemplate_1 + " contains the path parameter " + pathParamName + + (" however, it is not present in parameters: " + stringifiedPathParameters + ".") + + ("The value of the path parameter can either be a \"string\" of the form { " + pathParamName + ": \"some sample value\" } or ") + + ("it can be an \"object\" of the form { \"" + pathParamName + "\": { value: \"some sample value\", skipUrlEncoding: true } }.")); } if (typeof pathParam.valueOf() === "string") { - url = url.replace(item, encodeURIComponent(pathParam)); + url_1 = url_1.replace(item, encodeURIComponent(pathParam)); } if (typeof pathParam.valueOf() === "object") { if (!pathParam.value) { - throw new Error(`options.pathParameters[${pathParamName}] is of type "object" but it does not 
contain a "value" property.`); + throw new Error("options.pathParameters[" + pathParamName + "] is of type \"object\" but it does not contain a \"value\" property."); } if (pathParam.skipUrlEncoding) { - url = url.replace(item, pathParam.value); + url_1 = url_1.replace(item, pathParam.value); } else { - url = url.replace(item, encodeURIComponent(pathParam.value)); + url_1 = url_1.replace(item, encodeURIComponent(pathParam.value)); } } }); } - this.url = url; + this.url = url_1; } // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. if (options.queryParameters) { - const queryParameters = options.queryParameters; + var queryParameters = options.queryParameters; if (typeof queryParameters !== "object") { - throw new Error(`options.queryParameters must be of type object. It should be a JSON object ` + - `of "query-parameter-name" as the key and the "query-parameter-value" as the value. ` + - `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.`); + throw new Error("options.queryParameters must be of type object. It should be a JSON object " + + "of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. " + + "The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }."); } // append question mark if it is not present in the url if (this.url && this.url.indexOf("?") === -1) { this.url += "?"; } // construct queryString - const queryParams = []; + var queryParams = []; // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). this.query = {}; - for (const queryParamName in queryParameters) { - const queryParam = queryParameters[queryParamName]; + for (var queryParamName in queryParameters) { + var queryParam = queryParameters[queryParamName]; if (queryParam) { if (typeof queryParam === "string") { queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); @@ -20359,7 +20952,7 @@ class WebResource { } else if (typeof queryParam === "object") { if (!queryParam.value) { - throw new Error(`options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.`); + throw new Error("options.queryParameters[" + queryParamName + "] is of type \"object\" but it does not contain a \"value\" property."); } if (queryParam.skipUrlEncoding) { queryParams.push(queryParamName + "=" + queryParam.value); @@ -20377,8 +20970,9 @@ class WebResource { } // add headers to the request if they are provided if (options.headers) { - const headers = options.headers; - for (const headerName of Object.keys(options.headers)) { + var headers = options.headers; + for (var _i = 0, _a = Object.keys(options.headers); _i < _a.length; _i++) { + var headerName = _a[_i]; this.headers.set(headerName, headers[headerName]); } } @@ -20425,13 +21019,13 @@ class WebResource { this.onDownloadProgress = options.onDownloadProgress; this.onUploadProgress = options.onUploadProgress; return this; - } + }; /** * Clone this WebResource HTTP request object. * @returns The clone of this WebResource HTTP request object. 
*/ - clone() { - const result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes); + WebResource.prototype.clone = function () { + var result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes); if (this.formData) { result.formData = this.formData; } @@ -20445,42 +21039,43 @@ class WebResource { result.operationResponseGetter = this.operationResponseGetter; } return result; - } -} + }; + return WebResource; +}()); // Copyright (c) Microsoft Corporation. -const custom = util.inspect.custom; +var custom = util.inspect.custom; // Copyright (c) Microsoft Corporation. /** * A class that handles the query portion of a URLBuilder. */ -class URLQuery { - constructor() { +var URLQuery = /** @class */ (function () { + function URLQuery() { this._rawQuery = {}; } /** * Get whether or not there any query parameters in this URLQuery. */ - any() { + URLQuery.prototype.any = function () { return Object.keys(this._rawQuery).length > 0; - } + }; /** * Get the keys of the query string. */ - keys() { + URLQuery.prototype.keys = function () { return Object.keys(this._rawQuery); - } + }; /** * Set a query parameter with the provided name and value. If the parameterValue is undefined or * empty, then this will attempt to remove an existing query parameter with the provided * parameterName. */ - set(parameterName, parameterValue) { - const caseParameterValue = parameterValue; + URLQuery.prototype.set = function (parameterName, parameterValue) { + var caseParameterValue = parameterValue; if (parameterName) { if (caseParameterValue !== undefined && caseParameterValue !== null) { - const newValue = Array.isArray(caseParameterValue) + var newValue = Array.isArray(caseParameterValue) ? caseParameterValue : caseParameterValue.toString(); this._rawQuery[parameterName] = newValue; @@ -20489,51 +21084,52 @@ class URLQuery { delete this._rawQuery[parameterName]; } } - } + }; /** * Get the value of the query parameter with the provided name. If no parameter exists with the * provided parameter name, then undefined will be returned. */ - get(parameterName) { + URLQuery.prototype.get = function (parameterName) { return parameterName ? this._rawQuery[parameterName] : undefined; - } + }; /** * Get the string representation of this query. The return value will not start with a "?". 
*/ - toString() { - let result = ""; - for (const parameterName in this._rawQuery) { + URLQuery.prototype.toString = function () { + var result = ""; + for (var parameterName in this._rawQuery) { if (result) { result += "&"; } - const parameterValue = this._rawQuery[parameterName]; + var parameterValue = this._rawQuery[parameterName]; if (Array.isArray(parameterValue)) { - const parameterStrings = []; - for (const parameterValueElement of parameterValue) { - parameterStrings.push(`${parameterName}=${parameterValueElement}`); + var parameterStrings = []; + for (var _i = 0, parameterValue_1 = parameterValue; _i < parameterValue_1.length; _i++) { + var parameterValueElement = parameterValue_1[_i]; + parameterStrings.push(parameterName + "=" + parameterValueElement); } result += parameterStrings.join("&"); } else { - result += `${parameterName}=${parameterValue}`; + result += parameterName + "=" + parameterValue; } } return result; - } + }; /** * Parse a URLQuery from the provided text. */ - static parse(text) { - const result = new URLQuery(); + URLQuery.parse = function (text) { + var result = new URLQuery(); if (text) { if (text.startsWith("?")) { text = text.substring(1); } - let currentState = "ParameterName"; - let parameterName = ""; - let parameterValue = ""; - for (let i = 0; i < text.length; ++i) { - const currentCharacter = text[i]; + var currentState = "ParameterName"; + var parameterName = ""; + var parameterValue = ""; + for (var i = 0; i < text.length; ++i) { + var currentCharacter = text[i]; switch (currentState) { case "ParameterName": switch (currentCharacter) { @@ -20571,78 +21167,81 @@ class URLQuery { } } return result; - } -} + }; + return URLQuery; +}()); /** * A class that handles creating, modifying, and parsing URLs. */ -class URLBuilder { +var URLBuilder = /** @class */ (function () { + function URLBuilder() { + } /** * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL * (such as a host, port, path, or query), those parts will be added to this URL as well. */ - setScheme(scheme) { + URLBuilder.prototype.setScheme = function (scheme) { if (!scheme) { this._scheme = undefined; } else { this.set(scheme, "SCHEME"); } - } + }; /** * Get the scheme that has been set in this URL. */ - getScheme() { + URLBuilder.prototype.getScheme = function () { return this._scheme; - } + }; /** * Set the host for this URL. If the provided host contains other parts of a URL (such as a * port, path, or query), those parts will be added to this URL as well. */ - setHost(host) { + URLBuilder.prototype.setHost = function (host) { if (!host) { this._host = undefined; } else { this.set(host, "SCHEME_OR_HOST"); } - } + }; /** * Get the host that has been set in this URL. */ - getHost() { + URLBuilder.prototype.getHost = function () { return this._host; - } + }; /** * Set the port for this URL. If the provided port contains other parts of a URL (such as a * path or query), those parts will be added to this URL as well. */ - setPort(port) { + URLBuilder.prototype.setPort = function (port) { if (port === undefined || port === null || port === "") { this._port = undefined; } else { this.set(port.toString(), "PORT"); } - } + }; /** * Get the port that has been set in this URL. */ - getPort() { + URLBuilder.prototype.getPort = function () { return this._port; - } + }; /** * Set the path for this URL. If the provided path contains a query, then it will be added to * this URL as well. 
*/ - setPath(path) { + URLBuilder.prototype.setPath = function (path) { if (!path) { this._path = undefined; } else { - const schemeIndex = path.indexOf("://"); + var schemeIndex = path.indexOf("://"); if (schemeIndex !== -1) { - const schemeStart = path.lastIndexOf("/", schemeIndex); + var schemeStart = path.lastIndexOf("/", schemeIndex); // Make sure to only grab the URL part of the path before setting the state back to SCHEME // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); @@ -20651,14 +21250,14 @@ class URLBuilder { this.set(path, "PATH"); } } - } + }; /** * Append the provided path to this URL's existing path. If the provided path contains a query, * then it will be added to this URL as well. */ - appendPath(path) { + URLBuilder.prototype.appendPath = function (path) { if (path) { - let currentPath = this.getPath(); + var currentPath = this.getPath(); if (currentPath) { if (!currentPath.endsWith("/")) { currentPath += "/"; @@ -20670,58 +21269,58 @@ class URLBuilder { } this.set(path, "PATH"); } - } + }; /** * Get the path that has been set in this URL. */ - getPath() { + URLBuilder.prototype.getPath = function () { return this._path; - } + }; /** * Set the query in this URL. */ - setQuery(query) { + URLBuilder.prototype.setQuery = function (query) { if (!query) { this._query = undefined; } else { this._query = URLQuery.parse(query); } - } + }; /** * Set a query parameter with the provided name and value in this URL's query. If the provided * query parameter value is undefined or empty, then the query parameter will be removed if it * existed. */ - setQueryParameter(queryParameterName, queryParameterValue) { + URLBuilder.prototype.setQueryParameter = function (queryParameterName, queryParameterValue) { if (queryParameterName) { if (!this._query) { this._query = new URLQuery(); } this._query.set(queryParameterName, queryParameterValue); } - } + }; /** * Get the value of the query parameter with the provided query parameter name. If no query * parameter exists with the provided name, then undefined will be returned. */ - getQueryParameterValue(queryParameterName) { + URLBuilder.prototype.getQueryParameterValue = function (queryParameterName) { return this._query ? this._query.get(queryParameterName) : undefined; - } + }; /** * Get the query in this URL. */ - getQuery() { + URLBuilder.prototype.getQuery = function () { return this._query ? this._query.toString() : undefined; - } + }; /** * Set the parts of this URL by parsing the provided text using the provided startState. 
*/ - set(text, startState) { - const tokenizer = new URLTokenizer(text, startState); + URLBuilder.prototype.set = function (text, startState) { + var tokenizer = new URLTokenizer(text, startState); while (tokenizer.next()) { - const token = tokenizer.current(); - let tokenPath; + var token = tokenizer.current(); + var tokenPath = void 0; if (token) { switch (token.type) { case "SCHEME": @@ -20743,21 +21342,21 @@ class URLBuilder { this._query = URLQuery.parse(token.text); break; default: - throw new Error(`Unrecognized URLTokenType: ${token.type}`); + throw new Error("Unrecognized URLTokenType: " + token.type); } } } - } - toString() { - let result = ""; + }; + URLBuilder.prototype.toString = function () { + var result = ""; if (this._scheme) { - result += `${this._scheme}://`; + result += this._scheme + "://"; } if (this._host) { result += this._host; } if (this._port) { - result += `:${this._port}`; + result += ":" + this._port; } if (this._path) { if (!this._path.startsWith("/")) { @@ -20766,15 +21365,15 @@ class URLBuilder { result += this._path; } if (this._query && this._query.any()) { - result += `?${this._query.toString()}`; + result += "?" + this._query.toString(); } return result; - } + }; /** * If the provided searchValue is found in this URLBuilder, then replace it with the provided * replaceValue. */ - replaceAll(searchValue, replaceValue) { + URLBuilder.prototype.replaceAll = function (searchValue, replaceValue) { if (searchValue) { this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); @@ -20782,40 +21381,42 @@ class URLBuilder { this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); } - } - static parse(text) { - const result = new URLBuilder(); + }; + URLBuilder.parse = function (text) { + var result = new URLBuilder(); result.set(text, "SCHEME_OR_HOST"); return result; - } -} -class URLToken { - constructor(text, type) { + }; + return URLBuilder; +}()); +var URLToken = /** @class */ (function () { + function URLToken(text, type) { this.text = text; this.type = type; } - static scheme(text) { + URLToken.scheme = function (text) { return new URLToken(text, "SCHEME"); - } - static host(text) { + }; + URLToken.host = function (text) { return new URLToken(text, "HOST"); - } - static port(text) { + }; + URLToken.port = function (text) { return new URLToken(text, "PORT"); - } - static path(text) { + }; + URLToken.path = function (text) { return new URLToken(text, "PATH"); - } - static query(text) { + }; + URLToken.query = function (text) { return new URLToken(text, "QUERY"); - } -} + }; + return URLToken; +}()); /** * Get whether or not the provided character (single character string) is an alphanumeric (letter or * digit) character. */ function isAlphaNumericCharacter(character) { - const characterCode = character.charCodeAt(0); + var characterCode = character.charCodeAt(0); return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); @@ -20823,8 +21424,8 @@ function isAlphaNumericCharacter(character) { /** * A class that tokenizes URL strings. */ -class URLTokenizer { - constructor(_text, state) { +var URLTokenizer = /** @class */ (function () { + function URLTokenizer(_text, state) { this._text = _text; this._textLength = _text ? 
_text.length : 0; this._currentState = state !== undefined && state !== null ? state : "SCHEME_OR_HOST"; @@ -20834,13 +21435,13 @@ class URLTokenizer { * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer * hasn't started or has finished tokenizing. */ - current() { + URLTokenizer.prototype.current = function () { return this._currentToken; - } + }; /** * Advance to the next URLToken and return whether or not a URLToken was found. */ - next() { + URLTokenizer.prototype.next = function () { if (!hasCurrentCharacter(this)) { this._currentToken = undefined; } @@ -20865,17 +21466,18 @@ class URLTokenizer { nextQuery(this); break; default: - throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); + throw new Error("Unrecognized URLTokenizerState: " + this._currentState); } } return !!this._currentToken; - } -} + }; + return URLTokenizer; +}()); /** * Read the remaining characters from this Tokenizer's character stream. */ function readRemaining(tokenizer) { - let result = ""; + var result = ""; if (tokenizer._currentIndex < tokenizer._textLength) { result = tokenizer._text.substring(tokenizer._currentIndex); tokenizer._currentIndex = tokenizer._textLength; @@ -20911,7 +21513,7 @@ function nextCharacter(tokenizer, step) { * Tokenizer's stream of characters. */ function peekCharacters(tokenizer, charactersToPeek) { - let endIndex = tokenizer._currentIndex + charactersToPeek; + var endIndex = tokenizer._currentIndex + charactersToPeek; if (tokenizer._textLength < endIndex) { endIndex = tokenizer._textLength; } @@ -20922,9 +21524,9 @@ function peekCharacters(tokenizer, charactersToPeek) { * is false when provided the current character. */ function readWhile(tokenizer, condition) { - let result = ""; + var result = ""; while (hasCurrentCharacter(tokenizer)) { - const currentCharacter = getCurrentCharacter(tokenizer); + var currentCharacter = getCurrentCharacter(tokenizer); if (!condition(currentCharacter)) { break; } @@ -20940,17 +21542,21 @@ function readWhile(tokenizer, condition) { * character stream is reached. */ function readWhileLetterOrDigit(tokenizer) { - return readWhile(tokenizer, (character) => isAlphaNumericCharacter(character)); + return readWhile(tokenizer, function (character) { return isAlphaNumericCharacter(character); }); } /** * Read characters from this Tokenizer until one of the provided terminating characters is read or * the end of the character stream is reached. 
*/ -function readUntilCharacter(tokenizer, ...terminatingCharacters) { - return readWhile(tokenizer, (character) => terminatingCharacters.indexOf(character) === -1); +function readUntilCharacter(tokenizer) { + var terminatingCharacters = []; + for (var _i = 1; _i < arguments.length; _i++) { + terminatingCharacters[_i - 1] = arguments[_i]; + } + return readWhile(tokenizer, function (character) { return terminatingCharacters.indexOf(character) === -1; }); } function nextScheme(tokenizer) { - const scheme = readWhileLetterOrDigit(tokenizer); + var scheme = readWhileLetterOrDigit(tokenizer); tokenizer._currentToken = URLToken.scheme(scheme); if (!hasCurrentCharacter(tokenizer)) { tokenizer._currentState = "DONE"; @@ -20960,7 +21566,7 @@ function nextScheme(tokenizer) { } } function nextSchemeOrHost(tokenizer) { - const schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + var schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); if (!hasCurrentCharacter(tokenizer)) { tokenizer._currentToken = URLToken.host(schemeOrHost); tokenizer._currentState = "DONE"; @@ -20989,7 +21595,7 @@ function nextHost(tokenizer) { if (peekCharacters(tokenizer, 3) === "://") { nextCharacter(tokenizer, 3); } - const host = readUntilCharacter(tokenizer, ":", "/", "?"); + var host = readUntilCharacter(tokenizer, ":", "/", "?"); tokenizer._currentToken = URLToken.host(host); if (!hasCurrentCharacter(tokenizer)) { tokenizer._currentState = "DONE"; @@ -21008,7 +21614,7 @@ function nextPort(tokenizer) { if (getCurrentCharacter(tokenizer) === ":") { nextCharacter(tokenizer); } - const port = readUntilCharacter(tokenizer, "/", "?"); + var port = readUntilCharacter(tokenizer, "/", "?"); tokenizer._currentToken = URLToken.port(port); if (!hasCurrentCharacter(tokenizer)) { tokenizer._currentState = "DONE"; @@ -21021,7 +21627,7 @@ function nextPort(tokenizer) { } } function nextPath(tokenizer) { - const path = readUntilCharacter(tokenizer, "?"); + var path = readUntilCharacter(tokenizer, "?"); tokenizer._currentToken = URLToken.path(path); if (!hasCurrentCharacter(tokenizer)) { tokenizer._currentState = "DONE"; @@ -21034,14 +21640,14 @@ function nextQuery(tokenizer) { if (getCurrentCharacter(tokenizer) === "?") { nextCharacter(tokenizer); } - const query = readRemaining(tokenizer); + var query = readRemaining(tokenizer); tokenizer._currentToken = URLToken.query(query); tokenizer._currentState = "DONE"; } // Copyright (c) Microsoft Corporation. -const RedactedString = "REDACTED"; -const defaultAllowedHeaderNames = [ +var RedactedString = "REDACTED"; +var defaultAllowedHeaderNames = [ "x-ms-client-request-id", "x-ms-return-client-request-id", "x-ms-useragent", @@ -21081,33 +21687,35 @@ const defaultAllowedHeaderNames = [ "Transfer-Encoding", "User-Agent" ]; -const defaultAllowedQueryParameters = ["api-version"]; -class Sanitizer { - constructor({ allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { +var defaultAllowedQueryParameters = ["api-version"]; +var Sanitizer = /** @class */ (function () { + function Sanitizer(_a) { + var _b = _a === void 0 ? {} : _a, _c = _b.allowedHeaderNames, allowedHeaderNames = _c === void 0 ? [] : _c, _d = _b.allowedQueryParameters, allowedQueryParameters = _d === void 0 ? [] : _d; allowedHeaderNames = Array.isArray(allowedHeaderNames) ? defaultAllowedHeaderNames.concat(allowedHeaderNames) : defaultAllowedHeaderNames; allowedQueryParameters = Array.isArray(allowedQueryParameters) ? 
defaultAllowedQueryParameters.concat(allowedQueryParameters) : defaultAllowedQueryParameters; - this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); - this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + this.allowedHeaderNames = new Set(allowedHeaderNames.map(function (n) { return n.toLowerCase(); })); + this.allowedQueryParameters = new Set(allowedQueryParameters.map(function (p) { return p.toLowerCase(); })); } - sanitize(obj) { - const seen = new Set(); - return JSON.stringify(obj, (key, value) => { + Sanitizer.prototype.sanitize = function (obj) { + var _this = this; + var seen = new Set(); + return JSON.stringify(obj, function (key, value) { // Ensure Errors include their interesting non-enumerable members if (value instanceof Error) { - return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); + return tslib.__assign(tslib.__assign({}, value), { name: value.name, message: value.message }); } if (key === "_headersMap") { - return this.sanitizeHeaders(value); + return _this.sanitizeHeaders(value); } else if (key === "url") { - return this.sanitizeUrl(value); + return _this.sanitizeUrl(value); } else if (key === "query") { - return this.sanitizeQuery(value); + return _this.sanitizeQuery(value); } else if (key === "body") { // Don't log the request body @@ -21130,19 +21738,20 @@ class Sanitizer { } return value; }, 2); - } - sanitizeHeaders(value) { - return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value); - } - sanitizeQuery(value) { - return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]); - } - sanitizeObject(value, allowedKeys, accessor) { + }; + Sanitizer.prototype.sanitizeHeaders = function (value) { + return this.sanitizeObject(value, this.allowedHeaderNames, function (v, k) { return v[k].value; }); + }; + Sanitizer.prototype.sanitizeQuery = function (value) { + return this.sanitizeObject(value, this.allowedQueryParameters, function (v, k) { return v[k]; }); + }; + Sanitizer.prototype.sanitizeObject = function (value, allowedKeys, accessor) { if (typeof value !== "object" || value === null) { return value; } - const sanitized = {}; - for (const k of Object.keys(value)) { + var sanitized = {}; + for (var _i = 0, _a = Object.keys(value); _i < _a.length; _i++) { + var k = _a[_i]; if (allowedKeys.has(k.toLowerCase())) { sanitized[k] = accessor(value, k); } @@ -21151,230 +21760,267 @@ class Sanitizer { } } return sanitized; - } - sanitizeUrl(value) { + }; + Sanitizer.prototype.sanitizeUrl = function (value) { if (typeof value !== "string" || value === null) { return value; } - const urlBuilder = URLBuilder.parse(value); - const queryString = urlBuilder.getQuery(); + var urlBuilder = URLBuilder.parse(value); + var queryString = urlBuilder.getQuery(); if (!queryString) { return value; } - const query = URLQuery.parse(queryString); - for (const k of query.keys()) { + var query = URLQuery.parse(queryString); + for (var _i = 0, _a = query.keys(); _i < _a.length; _i++) { + var k = _a[_i]; if (!this.allowedQueryParameters.has(k.toLowerCase())) { query.set(k, RedactedString); } } urlBuilder.setQuery(query.toString()); return urlBuilder.toString(); - } -} + }; + return Sanitizer; +}()); // Copyright (c) Microsoft Corporation. 
-const errorSanitizer = new Sanitizer(); -class RestError extends Error { - constructor(message, code, statusCode, request, response) { - super(message); - this.name = "RestError"; - this.code = code; - this.statusCode = statusCode; - this.request = request; - this.response = response; - Object.setPrototypeOf(this, RestError.prototype); +var errorSanitizer = new Sanitizer(); +var RestError = /** @class */ (function (_super) { + tslib.__extends(RestError, _super); + function RestError(message, code, statusCode, request, response) { + var _this = _super.call(this, message) || this; + _this.name = "RestError"; + _this.code = code; + _this.statusCode = statusCode; + _this.request = request; + _this.response = response; + Object.setPrototypeOf(_this, RestError.prototype); + return _this; } /** * Logging method for util.inspect in Node */ - [custom]() { - return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; - } -} -RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; -RestError.PARSE_ERROR = "PARSE_ERROR"; + RestError.prototype[custom] = function () { + return "RestError: " + this.message + " \n " + errorSanitizer.sanitize(this); + }; + RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; + RestError.PARSE_ERROR = "PARSE_ERROR"; + return RestError; +}(Error)); // Copyright (c) Microsoft Corporation. -const logger = logger$1.createClientLogger("core-http"); +var logger = logger$1.createClientLogger("core-http"); // Copyright (c) Microsoft Corporation. -class ReportTransform extends stream.Transform { - constructor(progressCallback) { - super(); - this.progressCallback = progressCallback; - this.loadedBytes = 0; +var ReportTransform = /** @class */ (function (_super) { + tslib.__extends(ReportTransform, _super); + function ReportTransform(progressCallback) { + var _this = _super.call(this) || this; + _this.progressCallback = progressCallback; + _this.loadedBytes = 0; + return _this; } - _transform(chunk, _encoding, callback) { + ReportTransform.prototype._transform = function (chunk, _encoding, callback) { this.push(chunk); this.loadedBytes += chunk.length; this.progressCallback({ loadedBytes: this.loadedBytes }); callback(undefined); + }; + return ReportTransform; +}(stream.Transform)); +var FetchHttpClient = /** @class */ (function () { + function FetchHttpClient() { } -} -class FetchHttpClient { - async sendRequest(httpRequest) { + FetchHttpClient.prototype.sendRequest = function (httpRequest) { var _a; - if (!httpRequest && typeof httpRequest !== "object") { - throw new Error("'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object."); - } - const abortController$1 = new abortController.AbortController(); - let abortListener; - if (httpRequest.abortSignal) { - if (httpRequest.abortSignal.aborted) { - throw new abortController.AbortError("The operation was aborted."); - } - abortListener = (event) => { - if (event.type === "abort") { - abortController$1.abort(); - } - }; - httpRequest.abortSignal.addEventListener("abort", abortListener); - } - if (httpRequest.timeout) { - setTimeout(() => { - abortController$1.abort(); - }, httpRequest.timeout); - } - if (httpRequest.formData) { - const formData = httpRequest.formData; - const requestForm = new FormData(); - const appendFormValue = (key, value) => { - // value function probably returns a stream so we can provide a fresh stream on each retry - if (typeof value === "function") { - value = value(); - } - if (value && - Object.prototype.hasOwnProperty.call(value, "value") && - 
Object.prototype.hasOwnProperty.call(value, "options")) { - requestForm.append(key, value.value, value.options); - } - else { - requestForm.append(key, value); - } - }; - for (const formKey of Object.keys(formData)) { - const formValue = formData[formKey]; - if (Array.isArray(formValue)) { - for (let j = 0; j < formValue.length; j++) { - appendFormValue(formKey, formValue[j]); - } - } - else { - appendFormValue(formKey, formValue); - } - } - httpRequest.body = requestForm; - httpRequest.formData = undefined; - const contentType = httpRequest.headers.get("Content-Type"); - if (contentType && contentType.indexOf("multipart/form-data") !== -1) { - if (typeof requestForm.getBoundary === "function") { - httpRequest.headers.set("Content-Type", `multipart/form-data; boundary=${requestForm.getBoundary()}`); - } - else { - // browser will automatically apply a suitable content-type header - httpRequest.headers.remove("Content-Type"); - } - } - } - let body = httpRequest.body - ? typeof httpRequest.body === "function" - ? httpRequest.body() - : httpRequest.body - : undefined; - if (httpRequest.onUploadProgress && httpRequest.body) { - const onUploadProgress = httpRequest.onUploadProgress; - const uploadReportStream = new ReportTransform(onUploadProgress); - if (isReadableStream(body)) { - body.pipe(uploadReportStream); - } - else { - uploadReportStream.end(body); - } - body = uploadReportStream; - } - const platformSpecificRequestInit = await this.prepareRequest(httpRequest); - const requestInit = Object.assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController$1.signal, redirect: "manual" }, platformSpecificRequestInit); - let operationResponse; - try { - const response = await this.fetch(httpRequest.url, requestInit); - const headers = parseHeaders(response.headers); - const streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) || - httpRequest.streamResponseBody; - operationResponse = { - headers: headers, - request: httpRequest, - status: response.status, - readableStreamBody: streaming - ? response.body - : undefined, - bodyAsText: !streaming ? 
await response.text() : undefined - }; - const onDownloadProgress = httpRequest.onDownloadProgress; - if (onDownloadProgress) { - const responseBody = response.body || undefined; - if (isReadableStream(responseBody)) { - const downloadReportStream = new ReportTransform(onDownloadProgress); - responseBody.pipe(downloadReportStream); - operationResponse.readableStreamBody = downloadReportStream; - } - else { - const length = parseInt(headers.get("Content-Length")) || undefined; - if (length) { - // Calling callback for non-stream response for consistency with browser - onDownloadProgress({ loadedBytes: length }); - } + return tslib.__awaiter(this, void 0, void 0, function () { + var abortController$1, abortListener, formData, requestForm_1, appendFormValue, _i, _b, formKey, formValue, j, contentType, body, onUploadProgress, uploadReportStream, platformSpecificRequestInit, requestInit, operationResponse, response, headers, streaming, _c, onDownloadProgress, responseBody, downloadReportStream, length_1, error_1, fetchError, uploadStreamDone, downloadStreamDone; + var _d; + return tslib.__generator(this, function (_e) { + switch (_e.label) { + case 0: + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error("'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object."); + } + abortController$1 = new abortController.AbortController(); + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new abortController.AbortError("The operation was aborted."); + } + abortListener = function (event) { + if (event.type === "abort") { + abortController$1.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + if (httpRequest.timeout) { + setTimeout(function () { + abortController$1.abort(); + }, httpRequest.timeout); + } + if (httpRequest.formData) { + formData = httpRequest.formData; + requestForm_1 = new FormData(); + appendFormValue = function (key, value) { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && + Object.prototype.hasOwnProperty.call(value, "value") && + Object.prototype.hasOwnProperty.call(value, "options")) { + requestForm_1.append(key, value.value, value.options); + } + else { + requestForm_1.append(key, value); + } + }; + for (_i = 0, _b = Object.keys(formData); _i < _b.length; _i++) { + formKey = _b[_i]; + formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + httpRequest.body = requestForm_1; + httpRequest.formData = undefined; + contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm_1.getBoundary === "function") { + httpRequest.headers.set("Content-Type", "multipart/form-data; boundary=" + requestForm_1.getBoundary()); + } + else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + body = httpRequest.body + ? typeof httpRequest.body === "function" + ? 
httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + onUploadProgress = httpRequest.onUploadProgress; + uploadReportStream = new ReportTransform(onUploadProgress); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + return [4 /*yield*/, this.prepareRequest(httpRequest)]; + case 1: + platformSpecificRequestInit = _e.sent(); + requestInit = tslib.__assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController$1.signal, redirect: "manual" }, platformSpecificRequestInit); + _e.label = 2; + case 2: + _e.trys.push([2, 8, 9, 10]); + return [4 /*yield*/, this.fetch(httpRequest.url, requestInit)]; + case 3: + response = _e.sent(); + headers = parseHeaders(response.headers); + streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) || + httpRequest.streamResponseBody; + _d = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: streaming + ? response.body + : undefined + }; + if (!!streaming) return [3 /*break*/, 5]; + return [4 /*yield*/, response.text()]; + case 4: + _c = _e.sent(); + return [3 /*break*/, 6]; + case 5: + _c = undefined; + _e.label = 6; + case 6: + operationResponse = (_d.bodyAsText = _c, + _d); + onDownloadProgress = httpRequest.onDownloadProgress; + if (onDownloadProgress) { + responseBody = response.body || undefined; + if (isReadableStream(responseBody)) { + downloadReportStream = new ReportTransform(onDownloadProgress); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } + else { + length_1 = parseInt(headers.get("Content-Length")) || undefined; + if (length_1) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress({ loadedBytes: length_1 }); + } + } + } + return [4 /*yield*/, this.processRequest(operationResponse)]; + case 7: + _e.sent(); + return [2 /*return*/, operationResponse]; + case 8: + error_1 = _e.sent(); + fetchError = error_1; + if (fetchError.code === "ENOTFOUND") { + throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + } + else if (fetchError.type === "aborted") { + throw new abortController.AbortError("The operation was aborted."); + } + throw fetchError; + case 9: + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(function () { + var _a; + (_a = httpRequest.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener("abort", abortListener); + return; + }) + .catch(function (e) { + logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + return [7 /*endfinally*/]; + case 10: return [2 /*return*/]; } - } - await this.processRequest(operationResponse); - return operationResponse; - } - catch (error) { - const fetchError = error; - if (fetchError.code === "ENOTFOUND") { - throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); - } - else if (fetchError.type === "aborted") { - throw new abortController.AbortError("The operation was aborted."); - } - throw fetchError; - } - finally { - // clean up event listener - if (httpRequest.abortSignal && abortListener) { - let uploadStreamDone = Promise.resolve(); - if (isReadableStream(body)) { - uploadStreamDone = isStreamComplete(body); - } - let downloadStreamDone = Promise.resolve(); - if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { - downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody); - } - Promise.all([uploadStreamDone, downloadStreamDone]) - .then(() => { - var _a; - (_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); - return; - }) - .catch((e) => { - logger.warning("Error when cleaning up abortListener on httpRequest", e); - }); - } - } - } -} + }); + }); + }; + return FetchHttpClient; +}()); function isReadableStream(body) { return body && typeof body.pipe === "function"; } function isStreamComplete(stream) { - return new Promise((resolve) => { + return new Promise(function (resolve) { stream.on("close", resolve); stream.on("end", resolve); stream.on("error", resolve); }); } function parseHeaders(headers) { - const httpHeaders = new HttpHeaders(); - headers.forEach((value, key) => { + var httpHeaders = new HttpHeaders(); + headers.forEach(function (value, key) { httpHeaders.set(key, value); }); return httpHeaders; @@ -21382,14 +22028,14 @@ function parseHeaders(headers) { // Copyright (c) Microsoft Corporation. 
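// Illustrative sketch (editorial, not part of the generated bundle): the
// tslib.__awaiter / tslib.__generator state machines in the FetchHttpClient
// conversion above are TypeScript's ES5 emit for the original async/await
// code; each "case N" label is a resume point ([4, x] yields an awaited
// value, [2, x] returns). A minimal example of the same pattern, assuming
// the tslib package is installed as it is for this bundle:
var tslibDemo = require("tslib");
function delayedValue(value, ms) {
    return tslibDemo.__awaiter(this, void 0, void 0, function () {
        return tslibDemo.__generator(this, function (_a) {
            switch (_a.label) {
                case 0: return [4 /*yield*/, new Promise(function (resolve) { return setTimeout(resolve, ms); })];
                case 1:
                    _a.sent(); // resumes here once the awaited promise settles
                    return [2 /*return*/, value];
            }
        });
    });
}
// delayedValue("done", 100).then(function (v) { console.log(v); }); // logs "done" after ~100ms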
function createProxyAgent(requestUrl, proxySettings, headers) { - const host = URLBuilder.parse(proxySettings.host).getHost(); + var host = URLBuilder.parse(proxySettings.host).getHost(); if (!host) { throw new Error("Expecting a non-empty host in proxy settings."); } if (!isValidPort(proxySettings.port)) { throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); } - const tunnelOptions = { + var tunnelOptions = { proxy: { host: host, port: proxySettings.port, @@ -21397,21 +22043,18 @@ function createProxyAgent(requestUrl, proxySettings, headers) { } }; if (proxySettings.username && proxySettings.password) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; - } - else if (proxySettings.username) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; + tunnelOptions.proxy.proxyAuth = proxySettings.username + ":" + proxySettings.password; } - const isRequestHttps = isUrlHttps(requestUrl); - const isProxyHttps = isUrlHttps(proxySettings.host); - const proxyAgent = { + var isRequestHttps = isUrlHttps(requestUrl); + var isProxyHttps = isUrlHttps(proxySettings.host); + var proxyAgent = { isHttps: isRequestHttps, agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) }; return proxyAgent; } function isUrlHttps(url) { - const urlScheme = URLBuilder.parse(url).getScheme() || ""; + var urlScheme = URLBuilder.parse(url).getScheme() || ""; return urlScheme.toLowerCase() === "https"; } function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { @@ -21438,24 +22081,26 @@ function isValidPort(port) { function getCachedAgent(isHttps, agentCache) { return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; } -class NodeFetchHttpClient extends FetchHttpClient { - constructor() { - super(...arguments); - this.proxyAgents = {}; - this.keepAliveAgents = {}; - this.cookieJar = new tough.CookieJar(undefined, { looseMode: true }); +var NodeFetchHttpClient = /** @class */ (function (_super) { + tslib.__extends(NodeFetchHttpClient, _super); + function NodeFetchHttpClient() { + var _this = _super !== null && _super.apply(this, arguments) || this; + _this.proxyAgents = {}; + _this.keepAliveAgents = {}; + _this.cookieJar = new tough.CookieJar(undefined, { looseMode: true }); + return _this; } - getOrCreateAgent(httpRequest) { - const isHttps = isUrlHttps(httpRequest.url); + NodeFetchHttpClient.prototype.getOrCreateAgent = function (httpRequest) { + var isHttps = isUrlHttps(httpRequest.url); // At the moment, proxy settings and keepAlive are mutually // exclusive because the 'tunnel' library currently lacks the // ability to create a proxy with keepAlive turned on. 
if (httpRequest.proxySettings) { - let agent = getCachedAgent(isHttps, this.proxyAgents); + var agent = getCachedAgent(isHttps, this.proxyAgents); if (agent) { return agent; } - const tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); + var tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); agent = tunnel.agent; if (tunnel.isHttps) { this.proxyAgents.httpsAgent = tunnel.agent; @@ -21466,11 +22111,11 @@ class NodeFetchHttpClient extends FetchHttpClient { return agent; } else if (httpRequest.keepAlive) { - let agent = getCachedAgent(isHttps, this.keepAliveAgents); + var agent = getCachedAgent(isHttps, this.keepAliveAgents); if (agent) { return agent; } - const agentOptions = { + var agentOptions = { keepAlive: httpRequest.keepAlive }; if (isHttps) { @@ -21484,49 +22129,77 @@ class NodeFetchHttpClient extends FetchHttpClient { else { return isHttps ? https.globalAgent : http.globalAgent; } - } + }; // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs - async fetch(input, init) { - return node_fetch(input, init); - } - async prepareRequest(httpRequest) { - const requestInit = {}; - if (this.cookieJar && !httpRequest.headers.get("Cookie")) { - const cookieString = await new Promise((resolve, reject) => { - this.cookieJar.getCookieString(httpRequest.url, (err, cookie) => { - if (err) { - reject(err); - } - else { - resolve(cookie); - } - }); + NodeFetchHttpClient.prototype.fetch = function (input, init) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, node_fetch(input, init)]; }); - httpRequest.headers.set("Cookie", cookieString); - } - // Set the http(s) agent - requestInit.agent = this.getOrCreateAgent(httpRequest); - requestInit.compress = httpRequest.decompressResponse; - return requestInit; - } - async processRequest(operationResponse) { - if (this.cookieJar) { - const setCookieHeader = operationResponse.headers.get("Set-Cookie"); - if (setCookieHeader !== undefined) { - await new Promise((resolve, reject) => { - this.cookieJar.setCookie(setCookieHeader, operationResponse.request.url, { ignoreError: true }, (err) => { - if (err) { - reject(err); - } - else { - resolve(); - } - }); - }); - } - } - } -} + }); + }; + NodeFetchHttpClient.prototype.prepareRequest = function (httpRequest) { + return tslib.__awaiter(this, void 0, void 0, function () { + var requestInit, cookieString; + var _this = this; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + requestInit = {}; + if (!(this.cookieJar && !httpRequest.headers.get("Cookie"))) return [3 /*break*/, 2]; + return [4 /*yield*/, new Promise(function (resolve, reject) { + _this.cookieJar.getCookieString(httpRequest.url, function (err, cookie) { + if (err) { + reject(err); + } + else { + resolve(cookie); + } + }); + })]; + case 1: + cookieString = _a.sent(); + httpRequest.headers.set("Cookie", cookieString); + _a.label = 2; + case 2: + // Set the http(s) agent + requestInit.agent = this.getOrCreateAgent(httpRequest); + requestInit.compress = httpRequest.decompressResponse; + return [2 /*return*/, requestInit]; + } + }); + }); + }; + NodeFetchHttpClient.prototype.processRequest = function (operationResponse) { + return tslib.__awaiter(this, void 0, void 0, function () { + var setCookieHeader_1; + var _this = this; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!this.cookieJar) return [3 
/*break*/, 2]; + setCookieHeader_1 = operationResponse.headers.get("Set-Cookie"); + if (!(setCookieHeader_1 !== undefined)) return [3 /*break*/, 2]; + return [4 /*yield*/, new Promise(function (resolve, reject) { + _this.cookieJar.setCookie(setCookieHeader_1, operationResponse.request.url, { ignoreError: true }, function (err) { + if (err) { + reject(err); + } + else { + resolve(); + } + }); + })]; + case 1: + _a.sent(); + _a.label = 2; + case 2: return [2 /*return*/]; + } + }); + }); + }; + return NodeFetchHttpClient; +}(FetchHttpClient)); // Copyright (c) Microsoft Corporation. (function (HttpPipelineLogLevel) { @@ -21554,10 +22227,10 @@ class NodeFetchHttpClient extends FetchHttpClient { * @param opts - OperationOptions object to convert to RequestOptionsBase */ function operationOptionsToRequestOptionsBase(opts) { - const { requestOptions, tracingOptions } = opts, additionalOptions = tslib.__rest(opts, ["requestOptions", "tracingOptions"]); - let result = additionalOptions; + var requestOptions = opts.requestOptions, tracingOptions = opts.tracingOptions, additionalOptions = tslib.__rest(opts, ["requestOptions", "tracingOptions"]); + var result = additionalOptions; if (requestOptions) { - result = Object.assign(Object.assign({}, result), requestOptions); + result = tslib.__assign(tslib.__assign({}, result), requestOptions); } if (tracingOptions) { result.spanOptions = tracingOptions.spanOptions; @@ -21567,8 +22240,8 @@ function operationOptionsToRequestOptionsBase(opts) { } // Copyright (c) Microsoft Corporation. -class BaseRequestPolicy { - constructor(_nextPolicy, _options) { +var BaseRequestPolicy = /** @class */ (function () { + function BaseRequestPolicy(_nextPolicy, _options) { this._nextPolicy = _nextPolicy; this._options = _options; } @@ -21577,24 +22250,25 @@ class BaseRequestPolicy { * @param logLevel - The log level of the log that will be logged. * @returns Whether or not a log with the provided log level should be logged. */ - shouldLog(logLevel) { + BaseRequestPolicy.prototype.shouldLog = function (logLevel) { return this._options.shouldLog(logLevel); - } + }; /** * Attempt to log the provided message to the provided logger. If no logger was provided or if * the log level does not meat the logger's threshold, then nothing will be logged. * @param logLevel - The log level of this log. * @param message - The message of this log. */ - log(logLevel, message) { + BaseRequestPolicy.prototype.log = function (logLevel, message) { this._options.log(logLevel, message); - } -} + }; + return BaseRequestPolicy; +}()); /** * Optional properties that can be used when creating a RequestPolicy. */ -class RequestPolicyOptions { - constructor(_logger) { +var RequestPolicyOptions = /** @class */ (function () { + function RequestPolicyOptions(_logger) { this._logger = _logger; } /** @@ -21602,89 +22276,104 @@ class RequestPolicyOptions { * @param logLevel - The log level of the log that will be logged. * @returns Whether or not a log with the provided log level should be logged. */ - shouldLog(logLevel) { + RequestPolicyOptions.prototype.shouldLog = function (logLevel) { return (!!this._logger && logLevel !== exports.HttpPipelineLogLevel.OFF && logLevel <= this._logger.minimumLogLevel); - } + }; /** * Attempt to log the provided message to the provided logger. If no logger was provided or if * the log level does not meet the logger's threshold, then nothing will be logged. * @param logLevel - The log level of this log. * @param message - The message of this log. 
*/ - log(logLevel, message) { + RequestPolicyOptions.prototype.log = function (logLevel, message) { if (this._logger && this.shouldLog(logLevel)) { this._logger.log(logLevel, message); } - } -} + }; + return RequestPolicyOptions; +}()); // Copyright (c) Microsoft Corporation. -function logPolicy(loggingOptions = {}) { +function logPolicy(loggingOptions) { + if (loggingOptions === void 0) { loggingOptions = {}; } return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new LogPolicy(nextPolicy, options, loggingOptions); } }; } -class LogPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { - super(nextPolicy, options); - this.logger = logger$1; - this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - get allowedHeaderNames() { - return this.sanitizer.allowedHeaderNames; - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - set allowedHeaderNames(allowedHeaderNames) { - this.sanitizer.allowedHeaderNames = allowedHeaderNames; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. - */ - get allowedQueryParameters() { - return this.sanitizer.allowedQueryParameters; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. - */ - set allowedQueryParameters(allowedQueryParameters) { - this.sanitizer.allowedQueryParameters = allowedQueryParameters; +var LogPolicy = /** @class */ (function (_super) { + tslib.__extends(LogPolicy, _super); + function LogPolicy(nextPolicy, options, _a) { + var _b = _a === void 0 ? {} : _a, _c = _b.logger, logger$1 = _c === void 0 ? logger.info : _c, _d = _b.allowedHeaderNames, allowedHeaderNames = _d === void 0 ? [] : _d, _e = _b.allowedQueryParameters, allowedQueryParameters = _e === void 0 ? [] : _e; + var _this = _super.call(this, nextPolicy, options) || this; + _this.logger = logger$1; + _this.sanitizer = new Sanitizer({ allowedHeaderNames: allowedHeaderNames, allowedQueryParameters: allowedQueryParameters }); + return _this; } - sendRequest(request) { + Object.defineProperty(LogPolicy.prototype, "allowedHeaderNames", { + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. 
+ */ + get: function () { + return this.sanitizer.allowedHeaderNames; + }, + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + set: function (allowedHeaderNames) { + this.sanitizer.allowedHeaderNames = allowedHeaderNames; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(LogPolicy.prototype, "allowedQueryParameters", { + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get: function () { + return this.sanitizer.allowedQueryParameters; + }, + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + set: function (allowedQueryParameters) { + this.sanitizer.allowedQueryParameters = allowedQueryParameters; + }, + enumerable: false, + configurable: true + }); + LogPolicy.prototype.sendRequest = function (request) { + var _this = this; if (!this.logger.enabled) return this._nextPolicy.sendRequest(request); this.logRequest(request); - return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); - } - logRequest(request) { - this.logger(`Request: ${this.sanitizer.sanitize(request)}`); - } - logResponse(response) { - this.logger(`Response status code: ${response.status}`); - this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); + return this._nextPolicy.sendRequest(request).then(function (response) { return _this.logResponse(response); }); + }; + LogPolicy.prototype.logRequest = function (request) { + this.logger("Request: " + this.sanitizer.sanitize(request)); + }; + LogPolicy.prototype.logResponse = function (response) { + this.logger("Response status code: " + response.status); + this.logger("Headers: " + this.sanitizer.sanitize(response.headers)); return response; - } -} + }; + return LogPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -21697,7 +22386,7 @@ function getPathStringFromParameter(parameter) { return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); } function getPathStringFromParameterPath(parameterPath, mapper) { - let result; + var result; if (typeof parameterPath === "string") { result = parameterPath; } @@ -21716,9 +22405,9 @@ function getPathStringFromParameterPath(parameterPath, mapper) { * @internal */ function getStreamResponseStatusCodes(operationSpec) { - const result = new Set(); - for (const statusCode in operationSpec.responses) { - const operationResponse = operationSpec.responses[statusCode]; + var result = new Set(); + for (var statusCode in operationSpec.responses) { + var operationResponse = operationSpec.responses[statusCode]; if (operationResponse.bodyMapper && operationResponse.bodyMapper.type.name === MapperType.Stream) { result.add(Number(statusCode)); @@ -21732,7 +22421,7 @@ function getStreamResponseStatusCodes(operationSpec) { // by the xm2js library is mutable. 
See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 // By creating a new copy of the settings each time we instantiate the parser, // we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally. -const xml2jsDefaultOptionsV2 = { +var xml2jsDefaultOptionsV2 = { explicitCharkey: false, trim: false, normalize: false, @@ -21773,10 +22462,10 @@ const xml2jsDefaultOptionsV2 = { cdata: false }; // The xml2js settings for general XML parsing operations. -const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); +var xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); xml2jsParserSettings.explicitArray = false; // The xml2js settings for general XML building operations. -const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); +var xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); xml2jsBuilderSettings.explicitArray = false; xml2jsBuilderSettings.renderOpts = { pretty: false @@ -21786,11 +22475,12 @@ xml2jsBuilderSettings.renderOpts = { * @param obj - JSON object to be converted into XML string * @param opts - Options that govern the parsing of given JSON object */ -function stringifyXML(obj, opts = {}) { +function stringifyXML(obj, opts) { var _a; + if (opts === void 0) { opts = {}; } xml2jsBuilderSettings.rootName = opts.rootName; xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - const builder = new xml2js.Builder(xml2jsBuilderSettings); + var builder = new xml2js.Builder(xml2jsBuilderSettings); return builder.buildObject(obj); } /** @@ -21798,17 +22488,18 @@ function stringifyXML(obj, opts = {}) { * @param str - String containing the XML content to be parsed into JSON * @param opts - Options that govern the parsing of given xml string */ -function parseXML(str, opts = {}) { +function parseXML(str, opts) { var _a; + if (opts === void 0) { opts = {}; } xml2jsParserSettings.explicitRoot = !!opts.includeRoot; xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - const xmlParser = new xml2js.Parser(xml2jsParserSettings); - return new Promise((resolve, reject) => { + var xmlParser = new xml2js.Parser(xml2jsParserSettings); + return new Promise(function (resolve, reject) { if (!str) { reject(new Error("Document is empty")); } else { - xmlParser.parseString(str, (err, res) => { + xmlParser.parseString(str, function (err, res) { if (err) { reject(err); } @@ -21827,14 +22518,14 @@ function parseXML(str, opts = {}) { */ function deserializationPolicy(deserializationContentTypes, parsingOptions) { return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions); } }; } -const defaultJsonContentTypes = ["application/json", "text/json"]; -const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; -const DefaultDeserializationOptions = { +var defaultJsonContentTypes = ["application/json", "text/json"]; +var defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +var DefaultDeserializationOptions = { expectedContentTypes: { json: defaultJsonContentTypes, xml: defaultXmlContentTypes @@ -21844,28 +22535,39 @@ const DefaultDeserializationOptions = { * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the * HTTP pipeline. 
*/ -class DeserializationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions = {}) { +var DeserializationPolicy = /** @class */ (function (_super) { + tslib.__extends(DeserializationPolicy, _super); + function DeserializationPolicy(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions) { + if (parsingOptions === void 0) { parsingOptions = {}; } var _a; - super(nextPolicy, requestPolicyOptions); - this.jsonContentTypes = + var _this = _super.call(this, nextPolicy, requestPolicyOptions) || this; + _this.jsonContentTypes = (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; - this.xmlContentTypes = + _this.xmlContentTypes = (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; - this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - } - async sendRequest(request) { - return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { - xmlCharKey: this.xmlCharKey - })); + _this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; + return _this; } -} + DeserializationPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy.sendRequest(request).then(function (response) { + return deserializeResponseBody(_this.jsonContentTypes, _this.xmlContentTypes, response, { + xmlCharKey: _this.xmlCharKey + }); + })]; + }); + }); + }; + return DeserializationPolicy; +}(BaseRequestPolicy)); function getOperationResponse(parsedResponse) { - let result; - const request = parsedResponse.request; - const operationSpec = request.operationSpec; + var result; + var request = parsedResponse.request; + var operationSpec = request.operationSpec; if (operationSpec) { - const operationResponseGetter = request.operationResponseGetter; + var operationResponseGetter = request.operationResponseGetter; if (!operationResponseGetter) { result = operationSpec.responses[parsedResponse.status]; } @@ -21876,8 +22578,8 @@ function getOperationResponse(parsedResponse) { return result; } function shouldDeserializeResponse(parsedResponse) { - const shouldDeserialize = parsedResponse.request.shouldDeserialize; - let result; + var shouldDeserialize = parsedResponse.request.shouldDeserialize; + var result; if (shouldDeserialize === undefined) { result = true; } @@ -21889,23 +22591,24 @@ function shouldDeserializeResponse(parsedResponse) { } return result; } -function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { +function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options) { var _a, _b, _c; - const updatedOptions = { + if (options === void 0) { options = {}; } + var updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? 
_c : XML_CHARKEY }; - return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { + return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then(function (parsedResponse) { if (!shouldDeserializeResponse(parsedResponse)) { return parsedResponse; } - const operationSpec = parsedResponse.request.operationSpec; + var operationSpec = parsedResponse.request.operationSpec; if (!operationSpec || !operationSpec.responses) { return parsedResponse; } - const responseSpec = getOperationResponse(parsedResponse); - const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec); + var responseSpec = getOperationResponse(parsedResponse); + var _a = handleErrorResponse(parsedResponse, operationSpec, responseSpec), error = _a.error, shouldReturnResponse = _a.shouldReturnResponse; if (error) { throw error; } @@ -21916,7 +22619,7 @@ function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, op // use it to deserialize the response. if (responseSpec) { if (responseSpec.bodyMapper) { - let valueToDeserialize = parsedResponse.parsedBody; + var valueToDeserialize = parsedResponse.parsedBody; if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { valueToDeserialize = typeof valueToDeserialize === "object" @@ -21927,7 +22630,7 @@ function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, op parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); } catch (innerError) { - const restError = new RestError(`Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); + var restError = new RestError("Error " + innerError + " occurred in deserializing the responseBody - " + parsedResponse.bodyAsText, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); throw restError; } } @@ -21943,14 +22646,14 @@ function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, op }); } function isOperationSpecEmpty(operationSpec) { - const expectedStatusCodes = Object.keys(operationSpec.responses); + var expectedStatusCodes = Object.keys(operationSpec.responses); return (expectedStatusCodes.length === 0 || (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); } function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { var _a; - const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; - const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) + var isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; + var isExpectedStatusCode = isOperationSpecEmpty(operationSpec) ? isSuccessByStatus : !!responseSpec; if (isExpectedStatusCode) { @@ -21963,35 +22666,35 @@ function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { return { error: null, shouldReturnResponse: false }; } } - const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; - const streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) || + var errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? 
responseSpec : operationSpec.responses.default; + var streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) || parsedResponse.request.streamResponseBody; - const initialErrorMessage = streaming - ? `Unexpected status code: ${parsedResponse.status}` + var initialErrorMessage = streaming + ? "Unexpected status code: " + parsedResponse.status : parsedResponse.bodyAsText; - const error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); + var error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); // If the item failed but there's no error spec or default spec to deserialize the error, // we should fail so we just throw the parsed response if (!errorResponseSpec) { throw error; } - const defaultBodyMapper = errorResponseSpec.bodyMapper; - const defaultHeadersMapper = errorResponseSpec.headersMapper; + var defaultBodyMapper = errorResponseSpec.bodyMapper; + var defaultHeadersMapper = errorResponseSpec.headersMapper; try { // If error response has a body, try to deserialize it using default body mapper. // Then try to extract error code & message from it if (parsedResponse.parsedBody) { - const parsedBody = parsedResponse.parsedBody; - let parsedError; + var parsedBody = parsedResponse.parsedBody; + var parsedError = void 0; if (defaultBodyMapper) { - let valueToDeserialize = parsedBody; + var valueToDeserialize = parsedBody; if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) { valueToDeserialize = typeof parsedBody === "object" ? parsedBody[defaultBodyMapper.xmlElementName] : []; } parsedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody"); } - const internalError = parsedBody.error || parsedError || parsedBody; + var internalError = parsedBody.error || parsedError || parsedBody; error.code = internalError.code; if (internalError.message) { error.message = internalError.message; @@ -22006,36 +22709,36 @@ function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { } } catch (defaultError) { - error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; + error.message = "Error \"" + defaultError.message + "\" occurred in deserializing the responseBody - \"" + parsedResponse.bodyAsText + "\" for the default response."; } - return { error, shouldReturnResponse: false }; + return { error: error, shouldReturnResponse: false }; } function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { var _a; - const errorHandler = (err) => { - const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; - const errCode = err.code || RestError.PARSE_ERROR; - const e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse); + var errorHandler = function (err) { + var msg = "Error \"" + err + "\" occurred while parsing the response body - " + operationResponse.bodyAsText + "."; + var errCode = err.code || RestError.PARSE_ERROR; + var e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse); return Promise.reject(e); }; - const streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? 
void 0 : _a.has(operationResponse.status)) || + var streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) || operationResponse.request.streamResponseBody; if (!streaming && operationResponse.bodyAsText) { - const text = operationResponse.bodyAsText; - const contentType = operationResponse.headers.get("Content-Type") || ""; - const contentComponents = !contentType + var text_1 = operationResponse.bodyAsText; + var contentType = operationResponse.headers.get("Content-Type") || ""; + var contentComponents = !contentType ? [] - : contentType.split(";").map((component) => component.toLowerCase()); + : contentType.split(";").map(function (component) { return component.toLowerCase(); }); if (contentComponents.length === 0 || - contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { - return new Promise((resolve) => { - operationResponse.parsedBody = JSON.parse(text); + contentComponents.some(function (component) { return jsonContentTypes.indexOf(component) !== -1; })) { + return new Promise(function (resolve) { + operationResponse.parsedBody = JSON.parse(text_1); resolve(operationResponse); }).catch(errorHandler); } - else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { - return parseXML(text, opts) - .then((body) => { + else if (contentComponents.some(function (component) { return xmlContentTypes.indexOf(component) !== -1; })) { + return parseXML(text_1, opts) + .then(function (body) { operationResponse.parsedBody = body; return operationResponse; }) @@ -22047,11 +22750,11 @@ function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -const DEFAULT_CLIENT_RETRY_COUNT = 3; +var DEFAULT_CLIENT_RETRY_COUNT = 3; // intervals are in ms -const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; -const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; -const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; +var DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +var DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; function isNumber(n) { return typeof n === "number"; } @@ -22078,7 +22781,8 @@ function shouldRetry(retryLimit, predicate, retryData, response, error) { * @param retryData - The retry data. * @param err - The operation"s error, if any. */ -function updateRetryData(retryOptions, retryData = { retryCount: 0, retryInterval: 0 }, err) { +function updateRetryData(retryOptions, retryData, err) { + if (retryData === void 0) { retryData = { retryCount: 0, retryInterval: 0 }; } if (err) { if (retryData.error) { err.innerError = retryData.error; @@ -22088,72 +22792,18 @@ function updateRetryData(retryOptions, retryData = { retryCount: 0, retryInterva // Adjust retry count retryData.retryCount++; // Adjust retry interval - let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1; - const boundedRandDelta = retryOptions.retryInterval * 0.8 + + var incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1; + var boundedRandDelta = retryOptions.retryInterval * 0.8 + Math.floor(Math.random() * (retryOptions.retryInterval * 0.4)); incrementDelta *= boundedRandDelta; retryData.retryInterval = Math.min(retryOptions.minRetryInterval + incrementDelta, retryOptions.maxRetryInterval); return retryData; } -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-/** - * Helper TypeGuard that checks if the value is not null or undefined. - * @param thing - Anything - * @internal - */ -function isDefined(thing) { - return typeof thing !== "undefined" && thing !== null; -} - -// Copyright (c) Microsoft Corporation. -const StandardAbortMessage = "The operation was aborted."; -/** - * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. - * @param delayInMs - The number of milliseconds to be delayed. - * @param value - The value to be resolved with after a timeout of t milliseconds. - * @param options - The options for delay - currently abort options - * @param abortSignal - The abortSignal associated with containing operation. - * @param abortErrorMsg - The abort error message associated with containing operation. - * @returns - Resolved promise - */ -function delay(delayInMs, value, options) { - return new Promise((resolve, reject) => { - let timer = undefined; - let onAborted = undefined; - const rejectOnAbort = () => { - return reject(new abortController.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); - }; - const removeListeners = () => { - if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { - options.abortSignal.removeEventListener("abort", onAborted); - } - }; - onAborted = () => { - if (isDefined(timer)) { - clearTimeout(timer); - } - removeListeners(); - return rejectOnAbort(); - }; - if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { - return rejectOnAbort(); - } - timer = setTimeout(() => { - removeListeners(); - resolve(value); - }, delayInMs); - if (options === null || options === void 0 ? void 0 : options.abortSignal) { - options.abortSignal.addEventListener("abort", onAborted); - } - }); -} - // Copyright (c) Microsoft Corporation. function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); } }; @@ -22161,7 +22811,7 @@ function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { (function (RetryMode) { RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; })(exports.RetryMode || (exports.RetryMode = {})); -const DefaultRetryOptions = { +var DefaultRetryOptions = { maxRetries: DEFAULT_CLIENT_RETRY_COUNT, retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL @@ -22169,7 +22819,8 @@ const DefaultRetryOptions = { /** * Instantiates a new "ExponentialRetryPolicyFilter" instance. */ -class ExponentialRetryPolicy extends BaseRequestPolicy { +var ExponentialRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(ExponentialRetryPolicy, _super); /** * @param nextPolicy - The next RequestPolicy in the pipeline chain. * @param options - The options for this RequestPolicy. @@ -22178,188 +22829,217 @@ class ExponentialRetryPolicy extends BaseRequestPolicy { * @param minRetryInterval - The minimum retry interval, in milliseconds. * @param maxRetryInterval - The maximum retry interval, in milliseconds. */ - constructor(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) { - super(nextPolicy, options); - this.retryCount = isNumber(retryCount) ? 
retryCount : DEFAULT_CLIENT_RETRY_COUNT; - this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; - this.maxRetryInterval = isNumber(maxRetryInterval) + function ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + _this.maxRetryInterval = isNumber(maxRetryInterval) ? maxRetryInterval : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; } - sendRequest(request) { + ExponentialRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; return this._nextPolicy .sendRequest(request.clone()) - .then((response) => retry(this, request, response)) - .catch((error) => retry(this, request, error.response, undefined, error)); - } -} -async function retry(policy, request, response, retryData, requestError) { - function shouldPolicyRetry(responseParam) { - const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; - if (statusCode === 503 && (response === null || response === void 0 ? void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { - return false; - } - if (statusCode === undefined || - (statusCode < 500 && statusCode !== 408) || - statusCode === 501 || - statusCode === 505) { - return false; - } - return true; - } - retryData = updateRetryData({ - retryInterval: policy.retryInterval, - minRetryInterval: 0, - maxRetryInterval: policy.maxRetryInterval - }, retryData, requestError); - const isAborted = request.abortSignal && request.abortSignal.aborted; - if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { - logger.info(`Retrying request in ${retryData.retryInterval}`); - try { - await delay(retryData.retryInterval); - const res = await policy._nextPolicy.sendRequest(request.clone()); - return retry(policy, request, res, retryData); - } - catch (err) { - return retry(policy, request, response, retryData, err); + .then(function (response) { return retry(_this, request, response); }) + .catch(function (error) { return retry(_this, request, error.response, undefined, error); }); + }; + return ExponentialRetryPolicy; +}(BaseRequestPolicy)); +function retry(policy, request, response, retryData, requestError) { + return tslib.__awaiter(this, void 0, void 0, function () { + function shouldPolicyRetry(responseParam) { + var statusCode = responseParam === null || responseParam === void 0 ? 
void 0 : responseParam.status; + if (statusCode === undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + return true; } - } - else if (isAborted || requestError || !response) { - // If the operation failed in the end, return all errors instead of just the last one - const err = retryData.error || - new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); - throw err; - } - else { - return response; - } + var isAborted, res, err_1, err; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + retryData = updateRetryData({ + retryInterval: policy.retryInterval, + minRetryInterval: 0, + maxRetryInterval: policy.maxRetryInterval + }, retryData, requestError); + isAborted = request.abortSignal && request.abortSignal.aborted; + if (!(!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response))) return [3 /*break*/, 6]; + logger.info("Retrying request in " + retryData.retryInterval); + _a.label = 1; + case 1: + _a.trys.push([1, 4, , 5]); + return [4 /*yield*/, delay(retryData.retryInterval)]; + case 2: + _a.sent(); + return [4 /*yield*/, policy._nextPolicy.sendRequest(request.clone())]; + case 3: + res = _a.sent(); + return [2 /*return*/, retry(policy, request, res, retryData)]; + case 4: + err_1 = _a.sent(); + return [2 /*return*/, retry(policy, request, response, retryData, err_1)]; + case 5: return [3 /*break*/, 7]; + case 6: + if (isAborted || requestError || !response) { + err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + throw err; + } + else { + return [2 /*return*/, response]; + } + case 7: return [2 /*return*/]; + } + }); + }); } // Copyright (c) Microsoft Corporation. -function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { +function generateClientRequestIdPolicy(requestIdHeaderName) { + if (requestIdHeaderName === void 0) { requestIdHeaderName = "x-ms-client-request-id"; } return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); } }; } -class GenerateClientRequestIdPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _requestIdHeaderName) { - super(nextPolicy, options); - this._requestIdHeaderName = _requestIdHeaderName; +var GenerateClientRequestIdPolicy = /** @class */ (function (_super) { + tslib.__extends(GenerateClientRequestIdPolicy, _super); + function GenerateClientRequestIdPolicy(nextPolicy, options, _requestIdHeaderName) { + var _this = _super.call(this, nextPolicy, options) || this; + _this._requestIdHeaderName = _requestIdHeaderName; + return _this; } - sendRequest(request) { + GenerateClientRequestIdPolicy.prototype.sendRequest = function (request) { if (!request.headers.contains(this._requestIdHeaderName)) { request.headers.set(this._requestIdHeaderName, request.requestId); } return this._nextPolicy.sendRequest(request); - } -} + }; + return GenerateClientRequestIdPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. 
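// Illustrative sketch (editorial, not part of the generated bundle): the
// exponential retry conversion above (updateRetryData plus the retry state
// machine) waits roughly (2^(retryCount-1) - 1) * jittered(retryInterval),
// clamped to maxRetryInterval, between attempts. The same computation as a
// standalone helper, using the constants defined earlier in this file:
function nextRetryIntervalMs(retryCount, retryIntervalMs, minRetryIntervalMs, maxRetryIntervalMs) {
    var incrementDelta = Math.pow(2, retryCount - 1) - 1;
    // jitter keeps the base delay between 0.8x and 1.2x of retryIntervalMs
    var boundedRandDelta = retryIntervalMs * 0.8 +
        Math.floor(Math.random() * (retryIntervalMs * 0.4));
    incrementDelta *= boundedRandDelta;
    return Math.min(minRetryIntervalMs + incrementDelta, maxRetryIntervalMs);
}
// As wired up in retry() above (30s base, 0 min, 90s max): retryCount 1 gives 0ms,
// retryCount 2 roughly 24-36s, and retryCount 3+ is capped at 90s.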
function getDefaultUserAgentKey() { return Constants.HeaderConstants.USER_AGENT; } function getPlatformSpecificData() { - const runtimeInfo = { + var runtimeInfo = { key: "Node", value: process.version }; - const osInfo = { + var osInfo = { key: "OS", - value: `(${os.arch()}-${os.type()}-${os.release()})` + value: "(" + os.arch() + "-" + os.type() + "-" + os.release() + ")" }; return [runtimeInfo, osInfo]; } // Copyright (c) Microsoft Corporation. function getRuntimeInfo() { - const msRestRuntime = { + var msRestRuntime = { key: "core-http", value: Constants.coreHttpVersion }; return [msRestRuntime]; } -function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { +function getUserAgentString(telemetryInfo, keySeparator, valueSeparator) { + if (keySeparator === void 0) { keySeparator = " "; } + if (valueSeparator === void 0) { valueSeparator = "/"; } return telemetryInfo - .map((info) => { - const value = info.value ? `${valueSeparator}${info.value}` : ""; - return `${info.key}${value}`; + .map(function (info) { + var value = info.value ? "" + valueSeparator + info.value : ""; + return "" + info.key + value; }) .join(keySeparator); } -const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +var getDefaultUserAgentHeaderName = getDefaultUserAgentKey; function getDefaultUserAgentValue() { - const runtimeInfo = getRuntimeInfo(); - const platformSpecificData = getPlatformSpecificData(); - const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + var runtimeInfo = getRuntimeInfo(); + var platformSpecificData = getPlatformSpecificData(); + var userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); return userAgent; } function userAgentPolicy(userAgentData) { - const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null + var key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null ? getDefaultUserAgentKey() : userAgentData.key; - const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null + var value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null ? 
getDefaultUserAgentValue() : userAgentData.value; return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new UserAgentPolicy(nextPolicy, options, key, value); } }; } -class UserAgentPolicy extends BaseRequestPolicy { - constructor(_nextPolicy, _options, headerKey, headerValue) { - super(_nextPolicy, _options); - this._nextPolicy = _nextPolicy; - this._options = _options; - this.headerKey = headerKey; - this.headerValue = headerValue; +var UserAgentPolicy = /** @class */ (function (_super) { + tslib.__extends(UserAgentPolicy, _super); + function UserAgentPolicy(_nextPolicy, _options, headerKey, headerValue) { + var _this = _super.call(this, _nextPolicy, _options) || this; + _this._nextPolicy = _nextPolicy; + _this._options = _options; + _this.headerKey = headerKey; + _this.headerValue = headerValue; + return _this; } - sendRequest(request) { + UserAgentPolicy.prototype.sendRequest = function (request) { this.addUserAgentHeader(request); return this._nextPolicy.sendRequest(request); - } - addUserAgentHeader(request) { + }; + UserAgentPolicy.prototype.addUserAgentHeader = function (request) { if (!request.headers) { request.headers = new HttpHeaders(); } if (!request.headers.get(this.headerKey) && this.headerValue) { request.headers.set(this.headerKey, this.headerValue); } - } -} + }; + return UserAgentPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. /** * Methods that are allowed to follow redirects 301 and 302 */ -const allowedRedirect = ["GET", "HEAD"]; -const DefaultRedirectOptions = { +var allowedRedirect = ["GET", "HEAD"]; +var DefaultRedirectOptions = { handleRedirects: true, maxRetries: 20 }; -function redirectPolicy(maximumRetries = 20) { +function redirectPolicy(maximumRetries) { + if (maximumRetries === void 0) { maximumRetries = 20; } return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new RedirectPolicy(nextPolicy, options, maximumRetries); } }; } -class RedirectPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, maxRetries = 20) { - super(nextPolicy, options); - this.maxRetries = maxRetries; +var RedirectPolicy = /** @class */ (function (_super) { + tslib.__extends(RedirectPolicy, _super); + function RedirectPolicy(nextPolicy, options, maxRetries) { + if (maxRetries === void 0) { maxRetries = 20; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.maxRetries = maxRetries; + return _this; } - sendRequest(request) { + RedirectPolicy.prototype.sendRequest = function (request) { + var _this = this; return this._nextPolicy .sendRequest(request) - .then((response) => handleRedirect(this, response, 0)); - } -} + .then(function (response) { return handleRedirect(_this, response, 0); }); + }; + return RedirectPolicy; +}(BaseRequestPolicy)); function handleRedirect(policy, response, currentRetries) { - const { request, status } = response; - const locationHeader = response.headers.get("location"); + var request = response.request, status = response.status; + var locationHeader = response.headers.get("location"); if (locationHeader && (status === 300 || (status === 301 && allowedRedirect.includes(request.method)) || @@ -22367,7 +23047,7 @@ function handleRedirect(policy, response, currentRetries) { (status === 303 && request.method === "POST") || status === 307) && (!policy.maxRetries || currentRetries < policy.maxRetries)) { - const builder = URLBuilder.parse(request.url); + var builder = URLBuilder.parse(request.url); 
builder.setPath(locationHeader); request.url = builder.toString(); // POST request with Status code 303 should be converted into a @@ -22378,41 +23058,47 @@ function handleRedirect(policy, response, currentRetries) { } return policy._nextPolicy .sendRequest(request) - .then((res) => handleRedirect(policy, res, currentRetries + 1)); + .then(function (res) { return handleRedirect(policy, res, currentRetries + 1); }); } return Promise.resolve(response); } // Copyright (c) Microsoft Corporation. -function rpRegistrationPolicy(retryTimeout = 30) { +function rpRegistrationPolicy(retryTimeout) { + if (retryTimeout === void 0) { retryTimeout = 30; } return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); } }; } -class RPRegistrationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _retryTimeout = 30) { - super(nextPolicy, options); - this._retryTimeout = _retryTimeout; +var RPRegistrationPolicy = /** @class */ (function (_super) { + tslib.__extends(RPRegistrationPolicy, _super); + function RPRegistrationPolicy(nextPolicy, options, _retryTimeout) { + if (_retryTimeout === void 0) { _retryTimeout = 30; } + var _this = _super.call(this, nextPolicy, options) || this; + _this._retryTimeout = _retryTimeout; + return _this; } - sendRequest(request) { + RPRegistrationPolicy.prototype.sendRequest = function (request) { + var _this = this; return this._nextPolicy .sendRequest(request.clone()) - .then((response) => registerIfNeeded(this, request, response)); - } -} + .then(function (response) { return registerIfNeeded(_this, request, response); }); + }; + return RPRegistrationPolicy; +}(BaseRequestPolicy)); function registerIfNeeded(policy, request, response) { if (response.status === 409) { - const rpName = checkRPNotRegisteredError(response.bodyAsText); + var rpName = checkRPNotRegisteredError(response.bodyAsText); if (rpName) { - const urlPrefix = extractSubscriptionUrl(request.url); + var urlPrefix = extractSubscriptionUrl(request.url); return (registerRP(policy, urlPrefix, rpName, request) // Autoregistration of ${provider} failed for some reason. We will not return this error // instead will return the initial response with 409 status code back to the user. // do nothing here as we are returning the original response at the end of this method. - .catch(() => false) - .then((registrationStatus) => { + .catch(function () { return false; }) + .then(function (registrationStatus) { if (registrationStatus) { // Retry the original request. We have to change the x-ms-client-request-id // otherwise Azure endpoint will return the initial 409 (cached) response. @@ -22431,8 +23117,9 @@ function registerIfNeeded(policy, request, response) { * @param reuseUrlToo - Should the url from the original request be reused as well. Default false. * @returns A new request object with desired headers. */ -function getRequestEssentials(originalRequest, reuseUrlToo = false) { - const reqOptions = originalRequest.clone(); +function getRequestEssentials(originalRequest, reuseUrlToo) { + if (reuseUrlToo === void 0) { reuseUrlToo = false; } + var reqOptions = originalRequest.clone(); if (reuseUrlToo) { reqOptions.url = originalRequest.url; } @@ -22450,7 +23137,7 @@ function getRequestEssentials(originalRequest, reuseUrlToo = false) { * @returns The name of the RP if condition is satisfied else undefined. 
*/ function checkRPNotRegisteredError(body) { - let result, responseBody; + var result, responseBody; if (body) { try { responseBody = JSON.parse(body); @@ -22463,7 +23150,7 @@ function checkRPNotRegisteredError(body) { responseBody.error.message && responseBody.error.code && responseBody.error.code === "MissingSubscriptionRegistration") { - const matchRes = responseBody.error.message.match(/.*'(.*)'/i); + var matchRes = responseBody.error.message.match(/.*'(.*)'/i); if (matchRes) { result = matchRes.pop(); } @@ -22478,13 +23165,13 @@ function checkRPNotRegisteredError(body) { * @returns The url prefix as explained above. */ function extractSubscriptionUrl(url) { - let result; - const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + var result; + var matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); if (matchRes && matchRes[0]) { result = matchRes[0]; } else { - throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); + throw new Error("Unable to extract subscriptionId from the given url - " + url + "."); } return result; } @@ -22495,18 +23182,20 @@ function extractSubscriptionUrl(url) { * @param provider - The provider name to be registered. * @param originalRequest - The original request sent by the user that returned a 409 response * with a message that the provider is not registered. + * @param callback - The callback that handles the RP registration */ -async function registerRP(policy, urlPrefix, provider, originalRequest) { - const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; - const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; - const reqOptions = getRequestEssentials(originalRequest); +function registerRP(policy, urlPrefix, provider, originalRequest) { + var postUrl = urlPrefix + "providers/" + provider + "/register?api-version=2016-02-01"; + var getUrl = urlPrefix + "providers/" + provider + "?api-version=2016-02-01"; + var reqOptions = getRequestEssentials(originalRequest); reqOptions.method = "POST"; reqOptions.url = postUrl; - const response = await policy._nextPolicy.sendRequest(reqOptions); - if (response.status !== 200) { - throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); - } - return getRegistrationStatus(policy, getUrl, originalRequest); + return policy._nextPolicy.sendRequest(reqOptions).then(function (response) { + if (response.status !== 200) { + throw new Error("Autoregistration of " + provider + " failed. Please try registering manually."); + } + return getRegistrationStatus(policy, getUrl, originalRequest); + }); } /** * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. @@ -22517,24 +23206,25 @@ async function registerRP(policy, urlPrefix, provider, originalRequest) { * with a message that the provider is not registered. * @returns True if RP Registration is successful. 
*/ -async function getRegistrationStatus(policy, url, originalRequest) { - const reqOptions = getRequestEssentials(originalRequest); +function getRegistrationStatus(policy, url, originalRequest) { + var reqOptions = getRequestEssentials(originalRequest); reqOptions.url = url; reqOptions.method = "GET"; - const res = await policy._nextPolicy.sendRequest(reqOptions); - const obj = res.parsedBody; - if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { - return true; - } - else { - await delay(policy._retryTimeout * 1000); - return getRegistrationStatus(policy, url, originalRequest); - } + return policy._nextPolicy.sendRequest(reqOptions).then(function (res) { + var obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + return delay(policy._retryTimeout * 1000) + .then(function () { return getRegistrationStatus(policy, url, originalRequest); }); + } + }); } // Copyright (c) Microsoft Corporation. // Default options for the cycler if none are provided -const DEFAULT_CYCLER_OPTIONS = { +var DEFAULT_CYCLER_OPTIONS = { forcedRefreshWindowInMs: 1000, retryIntervalInMs: 3000, refreshWindowInMs: 1000 * 60 * 2 // Start refreshing 2m before expiry @@ -22552,33 +23242,59 @@ const DEFAULT_CYCLER_OPTIONS = { * throwing an exception * @returns - a promise that, if it resolves, will resolve with an access token */ -async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { - // This wrapper handles exceptions gracefully as long as we haven't exceeded - // the timeout. - async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { - try { - return await getAccessToken(); - } - catch (_a) { - return null; - } +function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + return tslib.__awaiter(this, void 0, void 0, function () { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. 
+ function tryGetAccessToken() { + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, finalToken; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!(Date.now() < timeoutInMs)) return [3 /*break*/, 5]; + _b.label = 1; + case 1: + _b.trys.push([1, 3, , 4]); + return [4 /*yield*/, getAccessToken()]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + _a = _b.sent(); + return [2 /*return*/, null]; + case 4: return [3 /*break*/, 7]; + case 5: return [4 /*yield*/, getAccessToken()]; + case 6: + finalToken = _b.sent(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return [2 /*return*/, finalToken]; + case 7: return [2 /*return*/]; + } + }); + }); } - else { - const finalToken = await getAccessToken(); - // Timeout is up, so throw if it's still null - if (finalToken === null) { - throw new Error("Failed to refresh access token."); + var token; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, tryGetAccessToken()]; + case 1: + token = _a.sent(); + _a.label = 2; + case 2: + if (!(token === null)) return [3 /*break*/, 5]; + return [4 /*yield*/, delay(retryIntervalInMs)]; + case 3: + _a.sent(); + return [4 /*yield*/, tryGetAccessToken()]; + case 4: + token = _a.sent(); + return [3 /*break*/, 2]; + case 5: return [2 /*return*/, token]; } - return finalToken; - } - } - let token = await tryGetAccessToken(); - while (token === null) { - await delay(retryIntervalInMs); - token = await tryGetAccessToken(); - } - return token; + }); + }); } /** * Creates a token cycler from a credential, scopes, and optional settings. @@ -22596,14 +23312,15 @@ async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { * @returns - a function that reliably produces a valid access token */ function createTokenCycler(credential, scopes, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + var _this = this; + var refreshWorker = null; + var token = null; + var options = tslib.__assign(tslib.__assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); /** * This little holder defines several predicates that we use to construct * the rules of refreshing the token. */ - const cycler = { + var cycler = { /** * Produces true if a refresh job is currently in progress. */ @@ -22635,18 +23352,20 @@ function createTokenCycler(credential, scopes, tokenCyclerOptions) { var _a; if (!cycler.isRefreshing) { // We bind `scopes` here to avoid passing it around a lot - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + var tryGetAccessToken = function () { + return credential.getToken(scopes, getTokenOptions); + }; // Take advantage of promise chaining to insert an assignment to `token` // before the refresh can be considered done. refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, // If we don't have a token, then we should timeout immediately (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) - .then((_token) => { + .then(function (_token) { refreshWorker = null; token = _token; return token; }) - .catch((reason) => { + .catch(function (reason) { // We also should reset the refresher if we enter a failed state. 
All // existing awaiters will throw, but subsequent requests will start a // new retry chain. @@ -22657,23 +23376,25 @@ function createTokenCycler(credential, scopes, tokenCyclerOptions) { } return refreshWorker; } - return async (tokenOptions) => { - // - // Simple rules: - // - If we MUST refresh, then return the refresh task, blocking - // the pipeline until a token is available. - // - If we SHOULD refresh, then run refresh but don't return it - // (we can still use the cached token). - // - Return the token, since it's fine if we didn't return in - // step 1. - // - if (cycler.mustRefresh) - return refresh(tokenOptions); - if (cycler.shouldRefresh) { - refresh(tokenOptions); - } - return token; - }; + return function (tokenOptions) { return tslib.__awaiter(_this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return [2 /*return*/, refresh(tokenOptions)]; + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return [2 /*return*/, token]; + }); + }); }; } // #endregion /** @@ -22685,28 +23406,36 @@ function createTokenCycler(credential, scopes, tokenCyclerOptions) { */ function bearerTokenAuthenticationPolicy(credential, scopes) { // This simple function encapsulates the entire process of reliably retrieving the token - const getToken = createTokenCycler(credential, scopes /* , options */); - class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const { token } = await getToken({ - abortSignal: webResource.abortSignal, - tracingOptions: { - spanOptions: webResource.spanOptions, - tracingContext: webResource.tracingContext - } + var getToken = createTokenCycler(credential, scopes /* , options */); + var BearerTokenAuthenticationPolicy = /** @class */ (function (_super) { + tslib.__extends(BearerTokenAuthenticationPolicy, _super); + function BearerTokenAuthenticationPolicy(nextPolicy, options) { + return _super.call(this, nextPolicy, options) || this; + } + BearerTokenAuthenticationPolicy.prototype.sendRequest = function (webResource) { + return tslib.__awaiter(this, void 0, void 0, function () { + var token; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, getToken({ + abortSignal: webResource.abortSignal, + tracingOptions: { + spanOptions: webResource.spanOptions, + tracingContext: webResource.tracingContext + } + })]; + case 1: + token = (_a.sent()).token; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, "Bearer " + token); + return [2 /*return*/, this._nextPolicy.sendRequest(webResource)]; + } + }); }); - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - return this._nextPolicy.sendRequest(webResource); - } - } + }; + return BearerTokenAuthenticationPolicy; + }(BaseRequestPolicy)); return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new 
BearerTokenAuthenticationPolicy(nextPolicy, options); } }; @@ -22715,7 +23444,7 @@ function bearerTokenAuthenticationPolicy(credential, scopes) { // Copyright (c) Microsoft Corporation. function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); } }; @@ -22726,55 +23455,69 @@ function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, max * @param minRetryInterval - The minimum retry interval, in milliseconds. * @param maxRetryInterval - The maximum retry interval, in milliseconds. */ -class SystemErrorRetryPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { - super(nextPolicy, options); - this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; - this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; - this.minRetryInterval = isNumber(minRetryInterval) +var SystemErrorRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(SystemErrorRetryPolicy, _super); + function SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = isNumber(minRetryInterval) ? minRetryInterval : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; - this.maxRetryInterval = isNumber(maxRetryInterval) + _this.maxRetryInterval = isNumber(maxRetryInterval) ? 
maxRetryInterval : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; } - sendRequest(request) { + SystemErrorRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; return this._nextPolicy .sendRequest(request.clone()) - .catch((error) => retry$1(this, request, error.response, error)); - } -} -async function retry$1(policy, request, operationResponse, err, retryData) { - retryData = updateRetryData(policy, retryData, err); - function shouldPolicyRetry(_response, error) { - if (error && - error.code && - (error.code === "ETIMEDOUT" || - error.code === "ESOCKETTIMEDOUT" || - error.code === "ECONNREFUSED" || - error.code === "ECONNRESET" || - error.code === "ENOENT")) { - return true; - } - return false; - } - if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) { - // If previous operation ended with an error and the policy allows a retry, do that - try { - await delay(retryData.retryInterval); - return policy._nextPolicy.sendRequest(request.clone()); - } - catch (nestedErr) { - return retry$1(policy, request, operationResponse, nestedErr, retryData); - } - } - else { - if (err) { - // If the operation failed in the end, return all errors instead of just the last one - return Promise.reject(retryData.error); + .catch(function (error) { return retry$1(_this, request, error.response, error); }); + }; + return SystemErrorRetryPolicy; +}(BaseRequestPolicy)); +function retry$1(policy, request, operationResponse, err, retryData) { + return tslib.__awaiter(this, void 0, void 0, function () { + function shouldPolicyRetry(_response, error) { + if (error && + error.code && + (error.code === "ETIMEDOUT" || + error.code === "ESOCKETTIMEDOUT" || + error.code === "ECONNREFUSED" || + error.code === "ECONNRESET" || + error.code === "ENOENT")) { + return true; + } + return false; } - return operationResponse; - } + var nestedErr_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + retryData = updateRetryData(policy, retryData, err); + if (!shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) return [3 /*break*/, 5]; + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, delay(retryData.retryInterval)]; + case 2: + _a.sent(); + return [2 /*return*/, policy._nextPolicy.sendRequest(request.clone())]; + case 3: + nestedErr_1 = _a.sent(); + return [2 /*return*/, retry$1(policy, request, operationResponse, nestedErr_1, retryData)]; + case 4: return [3 /*break*/, 6]; + case 5: + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return [2 /*return*/, Promise.reject(retryData.error)]; + } + return [2 /*return*/, operationResponse]; + case 6: return [2 /*return*/]; + } + }); + }); } // Copyright (c) Microsoft Corporation. @@ -22788,37 +23531,34 @@ async function retry$1(policy, request, operationResponse, err, retryData) { // Copyright (c) Microsoft Corporation. /** - * Stores the patterns specified in NO_PROXY environment variable. * @internal */ -const globalNoProxyList = []; -let noProxyListLoaded = false; -/** A cache of whether a host should bypass the proxy. 
*/ -const globalBypassedMap = new Map(); +var noProxyList = []; +var noProxyListLoaded = false; +var byPassedList = new Map(); function loadEnvironmentProxyValue() { if (!process) { return undefined; } - const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); - const allProxy = getEnvironmentValue(Constants.ALL_PROXY); - const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + var httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + var allProxy = getEnvironmentValue(Constants.ALL_PROXY); + var httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); return httpsProxy || allProxy || httpProxy; } -/** - * Check whether the host of a given `uri` matches any pattern in the no proxy list. - * If there's a match, any request sent to the same host shouldn't have the proxy settings set. - * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 - */ -function isBypassed(uri, noProxyList, bypassedMap) { +// Check whether the host of a given `uri` is in the noProxyList. +// If there's a match, any request sent to the same host won't have the proxy settings set. +// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 +function isBypassed(uri) { if (noProxyList.length === 0) { return false; } - const host = URLBuilder.parse(uri).getHost(); - if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { - return bypassedMap.get(host); + var host = URLBuilder.parse(uri).getHost(); + if (byPassedList.has(host)) { + return byPassedList.get(host); } - let isBypassedFlag = false; - for (const pattern of noProxyList) { + var isBypassedFlag = false; + for (var _i = 0, noProxyList_1 = noProxyList; _i < noProxyList_1.length; _i++) { + var pattern = noProxyList_1[_i]; if (pattern[0] === ".") { // This should match either domain it self or any subdomain or host // .foo.com will match foo.com it self or *.foo.com @@ -22837,20 +23577,20 @@ function isBypassed(uri, noProxyList, bypassedMap) { } } } - bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + byPassedList.set(host, isBypassedFlag); return isBypassedFlag; } /** * @internal */ function loadNoProxy() { - const noProxy = getEnvironmentValue(Constants.NO_PROXY); + var noProxy = getEnvironmentValue(Constants.NO_PROXY); noProxyListLoaded = true; if (noProxy) { return noProxy .split(",") - .map((item) => item.trim()) - .filter((item) => item.length); + .map(function (item) { return item.trim(); }) + .filter(function (item) { return item.length; }); } return []; } @@ -22861,184 +23601,174 @@ function getDefaultProxySettings(proxyUrl) { return undefined; } } - const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); - const parsedUrl = URLBuilder.parse(urlWithoutAuth); - const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; + var _a = extractAuthFromUrl(proxyUrl), username = _a.username, password = _a.password, urlWithoutAuth = _a.urlWithoutAuth; + var parsedUrl = URLBuilder.parse(urlWithoutAuth); + var schema = parsedUrl.getScheme() ? 
parsedUrl.getScheme() + "://" : ""; return { host: schema + parsedUrl.getHost(), port: Number.parseInt(parsedUrl.getPort() || "80"), - username, - password + username: username, + password: password }; } -/** - * A policy that allows one to apply proxy settings to all requests. - * If not passed static settings, they will be retrieved from the HTTPS_PROXY - * or HTTP_PROXY environment variables. - * @param proxySettings - ProxySettings to use on each request. - * @param options - additional settings, for example, custom NO_PROXY patterns - */ -function proxyPolicy(proxySettings, options) { +function proxyPolicy(proxySettings) { if (!proxySettings) { proxySettings = getDefaultProxySettings(); } if (!noProxyListLoaded) { - globalNoProxyList.push(...loadNoProxy()); + noProxyList.push.apply(noProxyList, loadNoProxy()); } return { - create: (nextPolicy, requestPolicyOptions) => { - return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); + create: function (nextPolicy, options) { + return new ProxyPolicy(nextPolicy, options, proxySettings); } }; } function extractAuthFromUrl(url) { - const atIndex = url.indexOf("@"); + var atIndex = url.indexOf("@"); if (atIndex === -1) { return { urlWithoutAuth: url }; } - const schemeIndex = url.indexOf("://"); - const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; - const auth = url.substring(authStart, atIndex); - const colonIndex = auth.indexOf(":"); - const hasPassword = colonIndex !== -1; - const username = hasPassword ? auth.substring(0, colonIndex) : auth; - const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; - const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + var schemeIndex = url.indexOf("://"); + var authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + var auth = url.substring(authStart, atIndex); + var colonIndex = auth.indexOf(":"); + var hasPassword = colonIndex !== -1; + var username = hasPassword ? auth.substring(0, colonIndex) : auth; + var password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + var urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); return { - username, - password, - urlWithoutAuth + username: username, + password: password, + urlWithoutAuth: urlWithoutAuth }; } -class ProxyPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, proxySettings, customNoProxyList) { - super(nextPolicy, options); - this.proxySettings = proxySettings; - this.customNoProxyList = customNoProxyList; +var ProxyPolicy = /** @class */ (function (_super) { + tslib.__extends(ProxyPolicy, _super); + function ProxyPolicy(nextPolicy, options, proxySettings) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.proxySettings = proxySettings; + return _this; } - sendRequest(request) { - var _a; - if (!request.proxySettings && - !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { + ProxyPolicy.prototype.sendRequest = function (request) { + if (!request.proxySettings && !isBypassed(request.url)) { request.proxySettings = this.proxySettings; } return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-/**
- * Maximum number of retries for the throttling retry policy
- */
-const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3;
+    };
+    return ProxyPolicy;
+}(BaseRequestPolicy));
 // Copyright (c) Microsoft Corporation.
-const StatusCodes = Constants.HttpConstants.StatusCodes;
+var StatusCodes = Constants.HttpConstants.StatusCodes;
 function throttlingRetryPolicy() {
     return {
-        create: (nextPolicy, options) => {
+        create: function (nextPolicy, options) {
             return new ThrottlingRetryPolicy(nextPolicy, options);
         }
     };
 }
-const StandardAbortMessage$1 = "The operation was aborted.";
 /**
  * To learn more, please refer to
  * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,
  * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and
  * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors
  */
-class ThrottlingRetryPolicy extends BaseRequestPolicy {
-    constructor(nextPolicy, options, _handleResponse) {
-        super(nextPolicy, options);
-        this.numberOfRetries = 0;
-        this._handleResponse = _handleResponse || this._defaultResponseHandler;
-    }
-    async sendRequest(httpRequest) {
-        const response = await this._nextPolicy.sendRequest(httpRequest.clone());
-        if (response.status !== StatusCodes.TooManyRequests &&
-            response.status !== StatusCodes.ServiceUnavailable) {
-            return response;
-        }
-        else {
-            return this._handleResponse(httpRequest, response);
-        }
+var ThrottlingRetryPolicy = /** @class */ (function (_super) {
+    tslib.__extends(ThrottlingRetryPolicy, _super);
+    function ThrottlingRetryPolicy(nextPolicy, options, _handleResponse) {
+        var _this = _super.call(this, nextPolicy, options) || this;
+        _this._handleResponse = _handleResponse || _this._defaultResponseHandler;
+        return _this;
     }
-    async _defaultResponseHandler(httpRequest, httpResponse) {
-        var _a;
-        const retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER);
-        if (retryAfterHeader) {
-            const delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader);
-            if (delayInMs) {
-                this.numberOfRetries += 1;
-                await delay(delayInMs, undefined, {
-                    abortSignal: httpRequest.abortSignal,
-                    abortErrorMsg: StandardAbortMessage$1
-                });
-                if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) {
-                    throw new abortController.AbortError(StandardAbortMessage$1);
-                }
-                if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) {
-                    return this.sendRequest(httpRequest);
-                }
-                else {
-                    return this._nextPolicy.sendRequest(httpRequest);
+    ThrottlingRetryPolicy.prototype.sendRequest = function (httpRequest) {
+        return tslib.__awaiter(this, void 0, void 0, function () {
+            var _this = this;
+            return tslib.__generator(this, function (_a) {
+                return [2 /*return*/, this._nextPolicy.sendRequest(httpRequest.clone()).then(function (response) {
+                        if (response.status !== StatusCodes.TooManyRequests) {
+                            return response;
+                        }
+                        else {
+                            return _this._handleResponse(httpRequest, response);
+                        }
+                    })];
+            });
+        });
+    };
+    ThrottlingRetryPolicy.prototype._defaultResponseHandler = function (httpRequest, httpResponse) {
+        return tslib.__awaiter(this, void 0, void 0, function () {
+            var retryAfterHeader, delayInMs;
+            var _this = this;
+            return tslib.__generator(this, function (_a) {
+                retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER);
+                if (retryAfterHeader) {
+                    delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader);
+                    if (delayInMs) {
+                        return [2 /*return*/, delay(delayInMs).then(function (_) { return _this._nextPolicy.sendRequest(httpRequest); })];
+                    }
                 }
-            }
-        }
-        return httpResponse;
-    }
-    static parseRetryAfterHeader(headerValue) {
-        const retryAfterInSeconds = Number(headerValue);
+                return [2 /*return*/, httpResponse];
+            });
+        });
+    };
+    ThrottlingRetryPolicy.parseRetryAfterHeader = function (headerValue) {
+        var retryAfterInSeconds = Number(headerValue);
         if (Number.isNaN(retryAfterInSeconds)) {
             return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);
         }
         else {
             return retryAfterInSeconds * 1000;
         }
-    }
-    static parseDateRetryAfterHeader(headerValue) {
+    };
+    ThrottlingRetryPolicy.parseDateRetryAfterHeader = function (headerValue) {
         try {
-            const now = Date.now();
-            const date = Date.parse(headerValue);
-            const diff = date - now;
+            var now = Date.now();
+            var date = Date.parse(headerValue);
+            var diff = date - now;
             return Number.isNaN(diff) ? undefined : diff;
         }
         catch (error) {
            return undefined;
         }
-    }
-}
+    };
+    return ThrottlingRetryPolicy;
+}(BaseRequestPolicy));
 // Copyright (c) Microsoft Corporation.
 function signingPolicy(authenticationProvider) {
     return {
-        create: (nextPolicy, options) => {
+        create: function (nextPolicy, options) {
             return new SigningPolicy(nextPolicy, options, authenticationProvider);
         }
     };
 }
-class SigningPolicy extends BaseRequestPolicy {
-    constructor(nextPolicy, options, authenticationProvider) {
-        super(nextPolicy, options);
-        this.authenticationProvider = authenticationProvider;
+var SigningPolicy = /** @class */ (function (_super) {
+    tslib.__extends(SigningPolicy, _super);
+    function SigningPolicy(nextPolicy, options, authenticationProvider) {
+        var _this = _super.call(this, nextPolicy, options) || this;
+        _this.authenticationProvider = authenticationProvider;
+        return _this;
     }
-    signRequest(request) {
+    SigningPolicy.prototype.signRequest = function (request) {
        return this.authenticationProvider.signRequest(request);
-    }
-    sendRequest(request) {
-        return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest));
-    }
-}
+    };
+    SigningPolicy.prototype.sendRequest = function (request) {
+        var _this = this;
+        return this.signRequest(request).then(function (nextRequest) {
+            return _this._nextPolicy.sendRequest(nextRequest);
+        });
+    };
+    return SigningPolicy;
+}(BaseRequestPolicy));
 // Copyright (c) Microsoft Corporation.
-const DefaultKeepAliveOptions = {
+var DefaultKeepAliveOptions = {
     enable: true
 };
 function keepAlivePolicy(keepAliveOptions) {
     return {
-        create: (nextPolicy, options) => {
+        create: function (nextPolicy, options) {
             return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions);
         }
     };
@@ -23046,7 +23776,8 @@ function keepAlivePolicy(keepAliveOptions) {
 /**
  * KeepAlivePolicy is a policy used to control keep alive settings for every request.
  */
-class KeepAlivePolicy extends BaseRequestPolicy {
+var KeepAlivePolicy = /** @class */ (function (_super) {
+    tslib.__extends(KeepAlivePolicy, _super);
     /**
      * Creates an instance of KeepAlivePolicy.
      *
@@ -23054,9 +23785,10 @@ class KeepAlivePolicy extends BaseRequestPolicy {
      * @param options -
     * @param keepAliveOptions -
     */
-    constructor(nextPolicy, options, keepAliveOptions) {
-        super(nextPolicy, options);
-        this.keepAliveOptions = keepAliveOptions;
+    function KeepAlivePolicy(nextPolicy, options, keepAliveOptions) {
+        var _this = _super.call(this, nextPolicy, options) || this;
+        _this.keepAliveOptions = keepAliveOptions;
+        return _this;
     }
     /**
      * Sends out request.
@@ -23064,85 +23796,104 @@ class KeepAlivePolicy extends BaseRequestPolicy {
     * @param request -
     * @returns
     */
-    async sendRequest(request) {
-        request.keepAlive = this.keepAliveOptions.enable;
-        return this._nextPolicy.sendRequest(request);
-    }
-}
+    KeepAlivePolicy.prototype.sendRequest = function (request) {
+        return tslib.__awaiter(this, void 0, void 0, function () {
+            return tslib.__generator(this, function (_a) {
+                request.keepAlive = this.keepAliveOptions.enable;
+                return [2 /*return*/, this._nextPolicy.sendRequest(request)];
+            });
+        });
+    };
+    return KeepAlivePolicy;
+}(BaseRequestPolicy));
 // Copyright (c) Microsoft Corporation.
-const createSpan = coreTracing.createSpanFunction({ +var createSpan = coreTracing.createSpanFunction({ packagePrefix: "", namespace: "" }); -function tracingPolicy(tracingOptions = {}) { +function tracingPolicy(tracingOptions) { + if (tracingOptions === void 0) { tracingOptions = {}; } return { - create(nextPolicy, options) { + create: function (nextPolicy, options) { return new TracingPolicy(nextPolicy, options, tracingOptions); } }; } -class TracingPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, tracingOptions) { - super(nextPolicy, options); - this.userAgent = tracingOptions.userAgent; +var TracingPolicy = /** @class */ (function (_super) { + tslib.__extends(TracingPolicy, _super); + function TracingPolicy(nextPolicy, options, tracingOptions) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.userAgent = tracingOptions.userAgent; + return _this; } - async sendRequest(request) { - if (!request.tracingContext) { - return this._nextPolicy.sendRequest(request); - } - // create a new span - const path = URLBuilder.parse(request.url).getPath() || "/"; - const { span } = createSpan(path, { - tracingOptions: { - spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: coreTracing.SpanKind.CLIENT }), - tracingContext: request.tracingContext - } - }); - span.setAttributes({ - "http.method": request.method, - "http.url": request.url, - requestId: request.requestId - }); - if (this.userAgent) { - span.setAttribute("http.user_agent", this.userAgent); - } - try { - // set headers - const spanContext = span.spanContext(); - const traceParentHeader = coreTracing.getTraceParentHeader(spanContext); - if (traceParentHeader && coreTracing.isSpanContextValid(spanContext)) { - request.headers.set("traceparent", traceParentHeader); - const traceState = spanContext.traceState && spanContext.traceState.serialize(); - // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent - if (traceState) { - request.headers.set("tracestate", traceState); - } - } - const response = await this._nextPolicy.sendRequest(request); - span.setAttribute("http.status_code", response.status); - const serviceRequestId = response.headers.get("x-ms-request-id"); - if (serviceRequestId) { - span.setAttribute("serviceRequestId", serviceRequestId); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.OK - }); - return response; - } - catch (err) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: err.message + TracingPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + var path, span, spanContext, traceParentHeader, traceState, response, serviceRequestId, err_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!request.tracingContext) { + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + } + path = URLBuilder.parse(request.url).getPath() || "/"; + span = createSpan(path, { + tracingOptions: { + spanOptions: tslib.__assign(tslib.__assign({}, request.spanOptions), { kind: coreTracing.SpanKind.CLIENT }), + tracingContext: request.tracingContext + } + }).span; + span.setAttributes({ + "http.method": request.method, + "http.url": request.url, + requestId: request.requestId + }); + if (this.userAgent) { + span.setAttribute("http.user_agent", this.userAgent); + } + _a.label = 1; + case 1: + _a.trys.push([1, 3, 4, 5]); + spanContext = span.context(); + traceParentHeader = 
coreTracing.getTraceParentHeader(spanContext); + if (traceParentHeader) { + request.headers.set("traceparent", traceParentHeader); + traceState = spanContext.traceState && spanContext.traceState.serialize(); + // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent + if (traceState) { + request.headers.set("tracestate", traceState); + } + } + return [4 /*yield*/, this._nextPolicy.sendRequest(request)]; + case 2: + response = _a.sent(); + span.setAttribute("http.status_code", response.status); + serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.OK + }); + return [2 /*return*/, response]; + case 3: + err_1 = _a.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: err_1.message + }); + span.setAttribute("http.status_code", err_1.statusCode); + throw err_1; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - span.setAttribute("http.status_code", err.statusCode); - throw err; - } - finally { - span.end(); - } - } -} + }); + }; + return TracingPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. /** @@ -23151,7 +23902,7 @@ class TracingPolicy extends BaseRequestPolicy { */ function disableResponseDecompressionPolicy() { return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new DisableResponseDecompressionPolicy(nextPolicy, options); } }; @@ -23160,7 +23911,8 @@ function disableResponseDecompressionPolicy() { * A policy to disable response decompression according to Accept-Encoding header * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding */ -class DisableResponseDecompressionPolicy extends BaseRequestPolicy { +var DisableResponseDecompressionPolicy = /** @class */ (function (_super) { + tslib.__extends(DisableResponseDecompressionPolicy, _super); /** * Creates an instance of DisableResponseDecompressionPolicy. * @@ -23169,8 +23921,8 @@ class DisableResponseDecompressionPolicy extends BaseRequestPolicy { */ // The parent constructor is protected. /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ - constructor(nextPolicy, options) { - super(nextPolicy, options); + function DisableResponseDecompressionPolicy(nextPolicy, options) { + return _super.call(this, nextPolicy, options) || this; } /** * Sends out request. @@ -23178,16 +23930,21 @@ class DisableResponseDecompressionPolicy extends BaseRequestPolicy { * @param request - * @returns */ - async sendRequest(request) { - request.decompressResponse = false; - return this._nextPolicy.sendRequest(request); - } -} + DisableResponseDecompressionPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + request.decompressResponse = false; + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + }); + }); + }; + return DisableResponseDecompressionPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. 
function ndJsonPolicy() { return { - create: (nextPolicy, options) => { + create: function (nextPolicy, options) { return new NdJsonPolicy(nextPolicy, options); } }; @@ -23195,30 +23952,37 @@ function ndJsonPolicy() { /** * NdJsonPolicy that formats a JSON array as newline-delimited JSON */ -class NdJsonPolicy extends BaseRequestPolicy { +var NdJsonPolicy = /** @class */ (function (_super) { + tslib.__extends(NdJsonPolicy, _super); /** * Creates an instance of KeepAlivePolicy. */ - constructor(nextPolicy, options) { - super(nextPolicy, options); + function NdJsonPolicy(nextPolicy, options) { + return _super.call(this, nextPolicy, options) || this; } /** * Sends a request. */ - async sendRequest(request) { - // There currently isn't a good way to bypass the serializer - if (typeof request.body === "string" && request.body.startsWith("[")) { - const body = JSON.parse(request.body); - if (Array.isArray(body)) { - request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); - } - } - return this._nextPolicy.sendRequest(request); - } -} + NdJsonPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + var body; + return tslib.__generator(this, function (_a) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map(function (item) { return JSON.stringify(item) + "\n"; }).join(""); + } + } + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + }); + }); + }; + return NdJsonPolicy; +}(BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. -let cachedHttpClient; +var cachedHttpClient; function getCachedDefaultHttpClient() { if (!cachedHttpClient) { cachedHttpClient = new NodeFetchHttpClient(); @@ -23230,28 +23994,29 @@ function getCachedDefaultHttpClient() { /** * ServiceClient sends service requests and receives responses. */ -class ServiceClient { +var ServiceClient = /** @class */ (function () { /** * The ServiceClient constructor * @param credentials - The credentials used for authentication with the service. * @param options - The service client options that govern the behavior of the client. */ - constructor(credentials, + function ServiceClient(credentials, /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */ options) { + var _this = this; if (!options) { options = {}; } this._withCredentials = options.withCredentials || false; this._httpClient = options.httpClient || getCachedDefaultHttpClient(); this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); - let requestPolicyFactories; + var requestPolicyFactories; if (Array.isArray(options.requestPolicyFactories)) { logger.info("ServiceClient: using custom request policies"); requestPolicyFactories = options.requestPolicyFactories; } else { - let authPolicyFactory = undefined; + var authPolicyFactory = undefined; if (coreAuth.isTokenCredential(credentials)) { logger.info("ServiceClient: creating bearer token authentication policy from provided credentials"); // Create a wrapped RequestPolicyFactory here so that we can provide the @@ -23260,16 +24025,16 @@ class ServiceClient { // implementations do not set baseUri until after ServiceClient's constructor // is finished, leaving baseUri empty at the time when it is needed to // build the correct scope name. 
- const wrappedPolicyFactory = () => { - let bearerTokenPolicyFactory = undefined; + var wrappedPolicyFactory = function () { + var bearerTokenPolicyFactory = undefined; // eslint-disable-next-line @typescript-eslint/no-this-alias - const serviceClient = this; - const serviceClientOptions = options; + var serviceClient = _this; + var serviceClientOptions = options; return { - create(nextPolicy, createOptions) { - const credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri); + create: function (nextPolicy, createOptions) { + var credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri); if (!credentialScopes) { - throw new Error(`When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`); + throw new Error("When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy"); } if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) { bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); @@ -23292,7 +24057,7 @@ class ServiceClient { if (options.requestPolicyFactories) { // options.requestPolicyFactories can also be a function that manipulates // the default requestPolicyFactories array - const newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + var newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); if (newRequestPolicyFactories) { requestPolicyFactories = newRequestPolicyFactories; } @@ -23303,11 +24068,11 @@ class ServiceClient { /** * Send the provided httpRequest. */ - sendRequest(options) { + ServiceClient.prototype.sendRequest = function (options) { if (options === null || options === undefined || typeof options !== "object") { throw new Error("options cannot be null or undefined and it must be of type object."); } - let httpRequest; + var httpRequest; try { if (isWebResourceLike(options)) { options.validateRequestProperties(); @@ -23321,220 +24086,242 @@ class ServiceClient { catch (error) { return Promise.reject(error); } - let httpPipeline = this._httpClient; + var httpPipeline = this._httpClient; if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { - for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + for (var i = this._requestPolicyFactories.length - 1; i >= 0; --i) { httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); } } return httpPipeline.sendRequest(httpRequest); - } + }; /** * Send an HTTP request that is populated using the provided OperationSpec. * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. * @param operationSpec - The OperationSpec to use to populate the httpRequest. * @param callback - The callback to call when the response is received. */ - async sendOperationRequest(operationArguments, operationSpec, callback) { + ServiceClient.prototype.sendOperationRequest = function (operationArguments, operationSpec, callback) { var _a; - if (typeof operationArguments.options === "function") { - callback = operationArguments.options; - operationArguments.options = undefined; - } - const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; - const httpRequest = new WebResource(); - let result; - try { - const baseUri = operationSpec.baseUrl || this.baseUri; - if (!baseUri) { - throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); - } - httpRequest.method = operationSpec.httpMethod; - httpRequest.operationSpec = operationSpec; - const requestUrl = URLBuilder.parse(baseUri); - if (operationSpec.path) { - requestUrl.appendPath(operationSpec.path); - } - if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { - for (const urlParameter of operationSpec.urlParameters) { - let urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); - urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions); - if (!urlParameter.skipEncoding) { - urlParameterValue = encodeURIComponent(urlParameterValue); - } - requestUrl.replaceAll(`{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`, urlParameterValue); - } - } - if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { - for (const queryParameter of operationSpec.queryParameters) { - let queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); - if (queryParameterValue !== undefined && queryParameterValue !== null) { - queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions); - if (queryParameter.collectionFormat !== undefined && - queryParameter.collectionFormat !== null) { - if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { - if (queryParameterValue.length === 0) { - // The collection is empty, no need to try serializing the current queryParam - continue; + return tslib.__awaiter(this, void 0, void 0, function () { + var serializerOptions, httpRequest, result, baseUri, requestUrl, _i, _b, urlParameter, urlParameterValue, _c, _d, queryParameter, queryParameterValue, index, item, index, contentType, _e, _f, headerParameter, headerValue, headerCollectionPrefix, _g, _h, key, options, customHeaderName, rawResponse, sendRequestError, error_1, error_2, cb; + return tslib.__generator(this, function (_j) { + switch (_j.label) { + case 0: + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; + httpRequest = new WebResource(); + _j.label = 1; + case 1: + _j.trys.push([1, 6, , 7]); + baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (_i = 0, _b = operationSpec.urlParameters; _i < _b.length; _i++) { + urlParameter = _b[_i]; + urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); } - else { - for (const index in queryParameterValue) { - const item = queryParameterValue[index]; - queryParameterValue[index] = - item === undefined || item === null ? "" : item.toString(); + requestUrl.replaceAll("{" + (urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)) + "}", urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (_c = 0, _d = operationSpec.queryParameters; _c < _d.length; _c++) { + queryParameter = _d[_c]; + queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue !== undefined && queryParameterValue !== null) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions); + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null) { + if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + // The collection is empty, no need to try serializing the current queryParam + continue; + } + else { + for (index in queryParameterValue) { + item = queryParameterValue[index]; + queryParameterValue[index] = + item === undefined || item === null ? 
"" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } } + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); } } - else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || - queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { - queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); - } } - if (!queryParameter.skipEncoding) { - if (Array.isArray(queryParameterValue)) { - for (const index in queryParameterValue) { - if (queryParameterValue[index] !== undefined && - queryParameterValue[index] !== null) { - queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + httpRequest.url = requestUrl.toString(); + contentType = operationSpec.contentType || this.requestContentType; + if (contentType && operationSpec.requestBody) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (_e = 0, _f = operationSpec.headerParameters; _e < _f.length; _e++) { + headerParameter = _f[_e]; + headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue !== undefined && headerValue !== null) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions); + headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (_g = 0, _h = Object.keys(headerValue); _g < _h.length; _g++) { + key = _h[_g]; + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); } } } - else { - queryParameterValue = encodeURIComponent(queryParameterValue); + } + options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + 
} + if (options.spanOptions) { + httpRequest.spanOptions = options.spanOptions; + } + if (options.tracingContext) { + httpRequest.tracingContext = options.tracingContext; + } + if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) { + httpRequest.shouldDeserialize = options.shouldDeserialize; } } - if (queryParameter.collectionFormat !== undefined && - queryParameter.collectionFormat !== null && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { - queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseStatusCodes === undefined) { + httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec); } - requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); - } - } - } - httpRequest.url = requestUrl.toString(); - const contentType = operationSpec.contentType || this.requestContentType; - if (contentType && operationSpec.requestBody) { - httpRequest.headers.set("Content-Type", contentType); - } - if (operationSpec.headerParameters) { - for (const headerParameter of operationSpec.headerParameters) { - let headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); - if (headerValue !== undefined && headerValue !== null) { - headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions); - const headerCollectionPrefix = headerParameter.mapper - .headerCollectionPrefix; - if (headerCollectionPrefix) { - for (const key of Object.keys(headerValue)) { - httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + rawResponse = void 0; + sendRequestError = void 0; + _j.label = 2; + case 2: + _j.trys.push([2, 4, , 5]); + return [4 /*yield*/, this.sendRequest(httpRequest)]; + case 3: + rawResponse = _j.sent(); + return [3 /*break*/, 5]; + case 4: + error_1 = _j.sent(); + sendRequestError = error_1; + return [3 /*break*/, 5]; + case 5: + if (sendRequestError) { + if (sendRequestError.response) { + sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] || + operationSpec.responses["default"]); } + result = Promise.reject(sendRequestError); } else { - httpRequest.headers.set(headerParameter.mapper.serializedName || - getPathStringFromParameter(headerParameter), headerValue); + result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status])); } - } - } - } - const options = operationArguments.options; - if (options) { - if (options.customHeaders) { - for (const customHeaderName in options.customHeaders) { - httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); - } - } - if (options.abortSignal) { - httpRequest.abortSignal = options.abortSignal; - } - if (options.timeout) { - httpRequest.timeout = options.timeout; - } - if (options.onUploadProgress) { - httpRequest.onUploadProgress = options.onUploadProgress; - } - if (options.onDownloadProgress) { - httpRequest.onDownloadProgress = options.onDownloadProgress; - } - if 
(options.spanOptions) { - httpRequest.spanOptions = options.spanOptions; - } - if (options.tracingContext) { - httpRequest.tracingContext = options.tracingContext; - } - if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) { - httpRequest.shouldDeserialize = options.shouldDeserialize; - } - } - httpRequest.withCredentials = this._withCredentials; - serializeRequestBody(this, httpRequest, operationArguments, operationSpec); - if (httpRequest.streamResponseStatusCodes === undefined) { - httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec); - } - let rawResponse; - let sendRequestError; - try { - rawResponse = await this.sendRequest(httpRequest); - } - catch (error) { - sendRequestError = error; - } - if (sendRequestError) { - if (sendRequestError.response) { - sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] || - operationSpec.responses["default"]); + return [3 /*break*/, 7]; + case 6: + error_2 = _j.sent(); + result = Promise.reject(error_2); + return [3 /*break*/, 7]; + case 7: + cb = callback; + if (cb) { + result + .then(function (res) { return cb(null, res._response.parsedBody, res._response.request, res._response); }) + .catch(function (err) { return cb(err); }); + } + return [2 /*return*/, result]; } - result = Promise.reject(sendRequestError); - } - else { - result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status])); - } - } - catch (error) { - result = Promise.reject(error); - } - const cb = callback; - if (cb) { - result - .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) - .catch((err) => cb(err)); - } - return result; - } -} + }); + }); + }; + return ServiceClient; +}()); function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { var _a, _b, _c, _d, _e, _f; - const serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {}; - const updatedOptions = { + var serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {}; + var updatedOptions = { rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? 
_e : XML_CHARKEY }; - const xmlCharKey = serializerOptions.xmlCharKey; + var xmlCharKey = serializerOptions.xmlCharKey; if (operationSpec.requestBody && operationSpec.requestBody.mapper) { httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); - const bodyMapper = operationSpec.requestBody.mapper; - const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } = bodyMapper; - const typeName = bodyMapper.type.name; + var bodyMapper = operationSpec.requestBody.mapper; + var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName, xmlNamespace = bodyMapper.xmlNamespace, xmlNamespacePrefix = bodyMapper.xmlNamespacePrefix; + var typeName = bodyMapper.type.name; try { if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) { - const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + var requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString, updatedOptions); - const isStream = typeName === MapperType.Stream; + var isStream = typeName === MapperType.Stream; if (operationSpec.isXML) { - const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; - const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions); + var xmlnsKey = xmlNamespacePrefix ? "xmlns:" + xmlNamespacePrefix : "xmlns"; + var value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions); if (typeName === MapperType.Sequence) { httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, - xmlCharKey + xmlCharKey: xmlCharKey }); } else if (!isStream) { httpRequest.body = stringifyXML(value, { rootName: xmlName || serializedName, - xmlCharKey + xmlCharKey: xmlCharKey }); } } @@ -23550,15 +24337,16 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op } } catch (error) { - throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); + throw new Error("Error \"" + error.message + "\" occurred in serializing the payload - " + JSON.stringify(serializedName, undefined, " ") + "."); } } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { httpRequest.formData = {}; - for (const formDataParameter of operationSpec.formDataParameters) { - const formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + for (var _i = 0, _g = operationSpec.formDataParameters; _i < _g.length; _i++) { + var formDataParameter = _g[_i]; + var formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); if (formDataParameterValue !== undefined && formDataParameterValue !== null) { - const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + var formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); 
httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); } } @@ -23568,18 +24356,19 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself */ function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { + var _a; // Composite and Sequence schemas already got their root namespace set during serialization // We just need to add xmlns to the other schema types if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { - const result = {}; + var result = {}; result[options.xmlCharKey] = serializedValue; - result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + result[XML_ATTRKEY] = (_a = {}, _a[xmlnsKey] = xmlNamespace, _a); return result; } return serializedValue; } function getValueOrFunctionResult(value, defaultValueCreator) { - let result; + var result; if (typeof value === "string") { result = value; } @@ -23592,15 +24381,15 @@ function getValueOrFunctionResult(value, defaultValueCreator) { return result; } function createDefaultRequestPolicyFactories(authPolicyFactory, options) { - const factories = []; + var factories = []; if (options.generateClientRequestIdHeader) { factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); } if (authPolicyFactory) { factories.push(authPolicyFactory); } - const userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); - const userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + var userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + var userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); if (userAgentHeaderName && userAgentHeaderValue) { factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); } @@ -23612,37 +24401,37 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) { factories.push(throttlingRetryPolicy()); } factories.push(deserializationPolicy(options.deserializationContentTypes)); - if (isNode) { + { factories.push(proxyPolicy(options.proxySettings)); } factories.push(logPolicy({ logger: logger.info })); return factories; } function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { - const requestPolicyFactories = []; + var requestPolicyFactories = []; if (pipelineOptions.sendStreamingJson) { requestPolicyFactories.push(ndJsonPolicy()); } - let userAgentValue = undefined; + var userAgentValue = undefined; if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) { - const userAgentInfo = []; + var userAgentInfo = []; userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix); // Add the default user agent value if it isn't already specified // by the userAgentPrefix option. 
- const defaultUserAgentInfo = getDefaultUserAgentValue(); + var defaultUserAgentInfo = getDefaultUserAgentValue(); if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) { userAgentInfo.push(defaultUserAgentInfo); } userAgentValue = userAgentInfo.join(" "); } - const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); - const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); - const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); - if (isNode) { + var keepAliveOptions = tslib.__assign(tslib.__assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); + var retryOptions = tslib.__assign(tslib.__assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); + var redirectOptions = tslib.__assign(tslib.__assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); + { requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); } - const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); - const loggingOptions = Object.assign({}, pipelineOptions.loggingOptions); + var deserializationOptions = tslib.__assign(tslib.__assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); + var loggingOptions = tslib.__assign({}, pipelineOptions.loggingOptions); requestPolicyFactories.push(tracingPolicy({ userAgent: userAgentValue }), keepAlivePolicy(keepAliveOptions), userAgentPolicy({ value: userAgentValue }), generateClientRequestIdPolicy(), deserializationPolicy(deserializationOptions.expectedContentTypes), throttlingRetryPolicy(), systemErrorRetryPolicy(), exponentialRetryPolicy(retryOptions.maxRetries, retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs)); if (redirectOptions.handleRedirects) { requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries)); @@ -23656,7 +24445,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { } return { httpClient: pipelineOptions.httpClient, - requestPolicyFactories + requestPolicyFactories: requestPolicyFactories }; } function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { @@ -23664,22 +24453,22 @@ function getOperationArgumentValueFromParameter(serviceClient, operationArgument } function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { var _a; - let value; + var value; if (typeof parameterPath === "string") { parameterPath = [parameterPath]; } - const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; + var serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; if (Array.isArray(parameterPath)) { if (parameterPath.length > 0) { if (parameterMapper.isConstant) { value = parameterMapper.defaultValue; } else { - let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + var propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); if (!propertySearchResult.propertyFound) { propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); } - let useDefaultValue = false; + var useDefaultValue = false; if (!propertySearchResult.propertyFound) { useDefaultValue = parameterMapper.required || @@ -23688,7 +24477,7 @@ function getOperationArgumentValueFromParameterPath(serviceClient, operationArgu value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; } // Serialize just for validation purposes. - const parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + var parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions); } } @@ -23696,12 +24485,12 @@ function getOperationArgumentValueFromParameterPath(serviceClient, operationArgu if (parameterMapper.required) { value = {}; } - for (const propertyName in parameterPath) { - const propertyMapper = parameterMapper.type.modelProperties[propertyName]; - const propertyPath = parameterPath[propertyName]; - const propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + for (var propertyName in parameterPath) { + var propertyMapper = parameterMapper.type.modelProperties[propertyName]; + var propertyPath = parameterPath[propertyName]; + var propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); // Serialize just for validation purposes. - const propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + var propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions); if (propertyValue !== undefined && propertyValue !== null) { if (!value) { @@ -23714,10 +24503,10 @@ function getOperationArgumentValueFromParameterPath(serviceClient, operationArgu return value; } function getPropertyFromParameterPath(parent, parameterPath) { - const result = { propertyFound: false }; - let i = 0; + var result = { propertyFound: false }; + var i = 0; for (; i < parameterPath.length; ++i) { - const parameterPathPart = parameterPath[i]; + var parameterPathPart = parameterPath[i]; // Make sure to check inherited properties too, so don't use hasOwnProperty(). 
if (parent !== undefined && parent !== null && parameterPathPart in parent) { parent = parent[parameterPathPart]; @@ -23733,29 +24522,31 @@ function getPropertyFromParameterPath(parent, parameterPath) { return result; } function flattenResponse(_response, responseSpec) { - const parsedHeaders = _response.parsedHeaders; - const bodyMapper = responseSpec && responseSpec.bodyMapper; - const addOperationResponse = (obj) => { + var parsedHeaders = _response.parsedHeaders; + var bodyMapper = responseSpec && responseSpec.bodyMapper; + var addOperationResponse = function (obj) { return Object.defineProperty(obj, "_response", { value: _response }); }; if (bodyMapper) { - const typeName = bodyMapper.type.name; + var typeName = bodyMapper.type.name; if (typeName === "Stream") { - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); } - const modelProperties = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; - const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + var modelProperties_1 = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + var isPageableResponse = Object.keys(modelProperties_1).some(function (k) { return modelProperties_1[k].serializedName === ""; }); if (typeName === "Sequence" || isPageableResponse) { - const arrayResponse = [...(_response.parsedBody || [])]; - for (const key of Object.keys(modelProperties)) { - if (modelProperties[key].serializedName) { + var arrayResponse = tslib.__spreadArray([], (_response.parsedBody || [])); + for (var _i = 0, _a = Object.keys(modelProperties_1); _i < _a.length; _i++) { + var key = _a[_i]; + if (modelProperties_1[key].serializedName) { arrayResponse[key] = _response.parsedBody[key]; } } if (parsedHeaders) { - for (const key of Object.keys(parsedHeaders)) { + for (var _b = 0, _c = Object.keys(parsedHeaders); _b < _c.length; _b++) { + var key = _c[_b]; arrayResponse[key] = parsedHeaders[key]; } } @@ -23763,26 +24554,26 @@ function flattenResponse(_response, responseSpec) { return arrayResponse; } if (typeName === "Composite" || typeName === "Dictionary") { - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), _response.parsedBody)); } } if (bodyMapper || _response.request.method === "HEAD" || isPrimitiveType(_response.parsedBody)) { // primitive body types and HEAD booleans - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { body: _response.parsedBody })); + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), { body: _response.parsedBody })); } - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), _response.parsedBody)); } function getCredentialScopes(options, baseUri) { if (options === null || options === void 0 ? void 0 : options.credentialScopes) { - const scopes = options.credentialScopes; + var scopes = options.credentialScopes; return Array.isArray(scopes) - ? scopes.map((scope) => new url.URL(scope).toString()) + ? 
scopes.map(function (scope) { return new url.URL(scope).toString(); }) : new url.URL(scopes).toString(); } if (baseUri) { - return `${baseUri}/.default`; + return baseUri + "/.default"; } return undefined; } @@ -23806,7 +24597,7 @@ function createSpanFunction(args) { /** * Defines the default token refresh buffer duration. */ -const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes +var TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes /** * Provides an {@link AccessTokenCache} implementation which clears * the cached {@link AccessToken}'s after the expiresOnTimestamp has @@ -23814,36 +24605,38 @@ const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes * * @deprecated No longer used in the bearer authorization policy. */ -class ExpiringAccessTokenCache { +var ExpiringAccessTokenCache = /** @class */ (function () { /** * Constructs an instance of {@link ExpiringAccessTokenCache} with * an optional expiration buffer time. */ - constructor(tokenRefreshBufferMs = TokenRefreshBufferMs) { + function ExpiringAccessTokenCache(tokenRefreshBufferMs) { + if (tokenRefreshBufferMs === void 0) { tokenRefreshBufferMs = TokenRefreshBufferMs; } this.cachedToken = undefined; this.tokenRefreshBufferMs = tokenRefreshBufferMs; } - setCachedToken(accessToken) { + ExpiringAccessTokenCache.prototype.setCachedToken = function (accessToken) { this.cachedToken = accessToken; - } - getCachedToken() { + }; + ExpiringAccessTokenCache.prototype.getCachedToken = function () { if (this.cachedToken && Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { this.cachedToken = undefined; } return this.cachedToken; - } -} + }; + return ExpiringAccessTokenCache; +}()); // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. * * @deprecated No longer used in the bearer authorization policy. */ -class AccessTokenRefresher { - constructor(credential, scopes, requiredMillisecondsBeforeNewRefresh = 30000) { +var AccessTokenRefresher = /** @class */ (function () { + function AccessTokenRefresher(credential, scopes, requiredMillisecondsBeforeNewRefresh) { + if (requiredMillisecondsBeforeNewRefresh === void 0) { requiredMillisecondsBeforeNewRefresh = 30000; } this.credential = credential; this.scopes = scopes; this.requiredMillisecondsBeforeNewRefresh = requiredMillisecondsBeforeNewRefresh; @@ -23853,38 +24646,49 @@ class AccessTokenRefresher { * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying * that we are ready for a new refresh. */ - isReady() { + AccessTokenRefresher.prototype.isReady = function () { // We're only ready for a new refresh if the required milliseconds have passed. return (!this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh); - } + }; /** * Stores the time in which it is called, * then requests a new token, * then sets this.promise to undefined, * then returns the token. 
*/ - async getToken(options) { - this.lastCalled = Date.now(); - const token = await this.credential.getToken(this.scopes, options); - this.promise = undefined; - return token || undefined; - } + AccessTokenRefresher.prototype.getToken = function (options) { + return tslib.__awaiter(this, void 0, void 0, function () { + var token; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + this.lastCalled = Date.now(); + return [4 /*yield*/, this.credential.getToken(this.scopes, options)]; + case 1: + token = _a.sent(); + this.promise = undefined; + return [2 /*return*/, token || undefined]; + } + }); + }); + }; /** * Requests a new token if we're not currently waiting for a new token. * Returns null if the required time between each call hasn't been reached. */ - refresh(options) { + AccessTokenRefresher.prototype.refresh = function (options) { if (!this.promise) { this.promise = this.getToken(options); } return this.promise; - } -} + }; + return AccessTokenRefresher; +}()); // Copyright (c) Microsoft Corporation. -const HeaderConstants = Constants.HeaderConstants; -const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; -class BasicAuthenticationCredentials { +var HeaderConstants = Constants.HeaderConstants; +var DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +var BasicAuthenticationCredentials = /** @class */ (function () { /** * Creates a new BasicAuthenticationCredentials object. * @@ -23892,7 +24696,8 @@ class BasicAuthenticationCredentials { * @param password - Password. * @param authorizationScheme - The authorization scheme. */ - constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { + function BasicAuthenticationCredentials(userName, password, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; } this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { throw new Error("userName cannot be null or undefined and must be of type string."); @@ -23910,27 +24715,28 @@ class BasicAuthenticationCredentials { * @param webResource - The WebResourceLike to be signed. * @returns The signed request object. */ - signRequest(webResource) { - const credentials = `${this.userName}:${this.password}`; - const encodedCredentials = `${this.authorizationScheme} ${encodeString(credentials)}`; + BasicAuthenticationCredentials.prototype.signRequest = function (webResource) { + var credentials = this.userName + ":" + this.password; + var encodedCredentials = this.authorizationScheme + " " + encodeString(credentials); if (!webResource.headers) webResource.headers = new HttpHeaders(); webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); return Promise.resolve(webResource); - } -} + }; + return BasicAuthenticationCredentials; +}()); // Copyright (c) Microsoft Corporation. /** * Authenticates to a service using an API key. */ -class ApiKeyCredentials { +var ApiKeyCredentials = /** @class */ (function () { /** * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. */ - constructor(options) { + function ApiKeyCredentials(options) { if (!options || (options && !options.inHeader && !options.inQuery)) { - throw new Error(`options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.`); + throw new Error("options cannot be null or undefined. 
Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided."); } this.inHeader = options.inHeader; this.inQuery = options.inQuery; @@ -23941,55 +24747,60 @@ class ApiKeyCredentials { * @param webResource - The WebResourceLike to be signed. * @returns The signed request object. */ - signRequest(webResource) { + ApiKeyCredentials.prototype.signRequest = function (webResource) { if (!webResource) { - return Promise.reject(new Error(`webResource cannot be null or undefined and must be of type "object".`)); + return Promise.reject(new Error("webResource cannot be null or undefined and must be of type \"object\".")); } if (this.inHeader) { if (!webResource.headers) { webResource.headers = new HttpHeaders(); } - for (const headerName in this.inHeader) { + for (var headerName in this.inHeader) { webResource.headers.set(headerName, this.inHeader[headerName]); } } if (this.inQuery) { if (!webResource.url) { - return Promise.reject(new Error(`url cannot be null in the request object.`)); + return Promise.reject(new Error("url cannot be null in the request object.")); } if (webResource.url.indexOf("?") < 0) { webResource.url += "?"; } - for (const key in this.inQuery) { + for (var key in this.inQuery) { if (!webResource.url.endsWith("?")) { webResource.url += "&"; } - webResource.url += `${key}=${this.inQuery[key]}`; + webResource.url += key + "=" + this.inQuery[key]; } } return Promise.resolve(webResource); - } -} + }; + return ApiKeyCredentials; +}()); // Copyright (c) Microsoft Corporation. -class TopicCredentials extends ApiKeyCredentials { +var TopicCredentials = /** @class */ (function (_super) { + tslib.__extends(TopicCredentials, _super); /** * Creates a new EventGrid TopicCredentials object. * * @param topicKey - The EventGrid topic key */ - constructor(topicKey) { + function TopicCredentials(topicKey) { + var _this = this; if (!topicKey || (topicKey && typeof topicKey !== "string")) { throw new Error("topicKey cannot be null or undefined and must be of type string."); } - const options = { + var options = { inHeader: { "aeg-sas-key": topicKey } }; - super(options); + _this = _super.call(this, options) || this; + return _this; } -} + return TopicCredentials; +}(ApiKeyCredentials)); Object.defineProperty(exports, 'isTokenCredential', { enumerable: true, @@ -25214,92 +26025,7 @@ var DiagLogLevel; /***/ }), /* 361 */, -/* 362 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DiagComponentLogger = void 0; -var global_utils_1 = __webpack_require__(525); -/** - * Component Logger which is meant to be used as part of any component which - * will add automatically additional namespace in front of the log message. 
- * It will then forward all message to global diag logger - * @example - * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); - * cLogger.debug('test'); - * // @opentelemetry/instrumentation-http test - */ -var DiagComponentLogger = /** @class */ (function () { - function DiagComponentLogger(props) { - this._namespace = props.namespace || 'DiagComponentLogger'; - } - DiagComponentLogger.prototype.debug = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return logProxy('debug', this._namespace, args); - }; - DiagComponentLogger.prototype.error = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return logProxy('error', this._namespace, args); - }; - DiagComponentLogger.prototype.info = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return logProxy('info', this._namespace, args); - }; - DiagComponentLogger.prototype.warn = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return logProxy('warn', this._namespace, args); - }; - DiagComponentLogger.prototype.verbose = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return logProxy('verbose', this._namespace, args); - }; - return DiagComponentLogger; -}()); -exports.DiagComponentLogger = DiagComponentLogger; -function logProxy(funcName, namespace, args) { - var logger = global_utils_1.getGlobal('diag'); - // shortcut if logger not set - if (!logger) { - return; - } - args.unshift(namespace); - return logger[funcName].apply(logger, args); -} -//# sourceMappingURL=ComponentLogger.js.map - -/***/ }), +/* 362 */, /* 363 */, /* 364 */, /* 365 */, @@ -25783,7 +26509,6 @@ Object.defineProperty(exports, "__esModule", { value: true }); exports.ProxyTracerProvider = void 0; var ProxyTracer_1 = __webpack_require__(398); var NoopTracerProvider_1 = __webpack_require__(224); -var NOOP_TRACER_PROVIDER = new NoopTracerProvider_1.NoopTracerProvider(); /** * Tracer provider which provides {@link ProxyTracer}s. * @@ -25804,7 +26529,7 @@ var ProxyTracerProvider = /** @class */ (function () { }; ProxyTracerProvider.prototype.getDelegate = function () { var _a; - return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; + return (_a = this._delegate) !== null && _a !== void 0 ? _a : NoopTracerProvider_1.NOOP_TRACER_PROVIDER; }; /** * Set the delegate tracer provider @@ -25862,7 +26587,6 @@ module.exports = function (Yallist) { Object.defineProperty(exports, "__esModule", { value: true }); exports.ProxyTracer = void 0; var NoopTracer_1 = __webpack_require__(216); -var NOOP_TRACER = new NoopTracer_1.NoopTracer(); /** * Proxy tracer provided by the proxy tracer provider */ @@ -25875,10 +26599,6 @@ var ProxyTracer = /** @class */ (function () { ProxyTracer.prototype.startSpan = function (name, options, context) { return this._getTracer().startSpan(name, options, context); }; - ProxyTracer.prototype.startActiveSpan = function (_name, _options, _context, _fn) { - var tracer = this._getTracer(); - return Reflect.apply(tracer.startActiveSpan, tracer, arguments); - }; /** * Try to get a tracer from the proxy tracer provider. * If the proxy tracer provider has no delegate, return a noop tracer. 
@@ -25889,7 +26609,7 @@ var ProxyTracer = /** @class */ (function () { } var tracer = this._provider.getDelegateTracer(this.name, this.version); if (!tracer) { - return NOOP_TRACER; + return NoopTracer_1.NOOP_TRACER; } this._delegate = tracer; return this._delegate; @@ -28079,77 +28799,7 @@ exports.listTar = listTar; /***/ }), /* 435 */, /* 436 */, -/* 437 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.NonRecordingSpan = void 0; -var spancontext_utils_1 = __webpack_require__(453); -/** - * The NonRecordingSpan is the default {@link Span} that is used when no Span - * implementation is available. All operations are no-op including context - * propagation. - */ -var NonRecordingSpan = /** @class */ (function () { - function NonRecordingSpan(_spanContext) { - if (_spanContext === void 0) { _spanContext = spancontext_utils_1.INVALID_SPAN_CONTEXT; } - this._spanContext = _spanContext; - } - // Returns a SpanContext. - NonRecordingSpan.prototype.spanContext = function () { - return this._spanContext; - }; - // By default does nothing - NonRecordingSpan.prototype.setAttribute = function (_key, _value) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.setAttributes = function (_attributes) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.addEvent = function (_name, _attributes) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.setStatus = function (_status) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.updateName = function (_name) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.end = function (_endTime) { }; - // isRecording always returns false for NonRecordingSpan. 
- NonRecordingSpan.prototype.isRecording = function () { - return false; - }; - // By default does nothing - NonRecordingSpan.prototype.recordException = function (_exception, _time) { }; - return NonRecordingSpan; -}()); -exports.NonRecordingSpan = NonRecordingSpan; -//# sourceMappingURL=NonRecordingSpan.js.map - -/***/ }), +/* 437 */, /* 438 */, /* 439 */ /***/ (function(__unusedmodule, exports, __webpack_require__) { @@ -28351,16 +29001,19 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) { for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.diag = exports.propagation = exports.trace = exports.context = exports.isValidSpanId = exports.isValidTraceId = exports.isSpanContextValid = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = exports.baggageEntryMetadataFromString = void 0; -__exportStar(__webpack_require__(880), exports); -var utils_1 = __webpack_require__(872); -Object.defineProperty(exports, "baggageEntryMetadataFromString", { enumerable: true, get: function () { return utils_1.baggageEntryMetadataFromString; } }); +exports.diag = exports.propagation = exports.trace = exports.context = exports.isValidSpanId = exports.isValidTraceId = exports.isSpanContextValid = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; +__exportStar(__webpack_require__(792), exports); __exportStar(__webpack_require__(452), exports); __exportStar(__webpack_require__(158), exports); __exportStar(__webpack_require__(893), exports); +__exportStar(__webpack_require__(637), exports); __exportStar(__webpack_require__(649), exports); __exportStar(__webpack_require__(906), exports); +__exportStar(__webpack_require__(727), exports); +__exportStar(__webpack_require__(851), exports); __exportStar(__webpack_require__(843), exports); +__exportStar(__webpack_require__(216), exports); +__exportStar(__webpack_require__(224), exports); __exportStar(__webpack_require__(398), exports); __exportStar(__webpack_require__(394), exports); __exportStar(__webpack_require__(79), exports); @@ -28370,6 +29023,7 @@ __exportStar(__webpack_require__(670), exports); __exportStar(__webpack_require__(59), exports); __exportStar(__webpack_require__(220), exports); __exportStar(__webpack_require__(409), exports); +__exportStar(__webpack_require__(839), exports); __exportStar(__webpack_require__(975), exports); __exportStar(__webpack_require__(70), exports); __exportStar(__webpack_require__(694), exports); @@ -28382,6 +29036,7 @@ Object.defineProperty(exports, "isSpanContextValid", { enumerable: true, get: fu Object.defineProperty(exports, "isValidTraceId", { enumerable: true, get: function () { return spancontext_utils_1.isValidTraceId; } }); Object.defineProperty(exports, "isValidSpanId", { enumerable: true, get: function () { return spancontext_utils_1.isValidSpanId; } }); __exportStar(__webpack_require__(132), exports); +__exportStar(__webpack_require__(754), exports); __exportStar(__webpack_require__(845), exports); var context_1 = __webpack_require__(77); /** Entrypoint for context API */ @@ -28696,23 +29351,7 @@ Object.defineProperty(exports, "__esModule", { value: true }); "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.wrapSpanContext = exports.isSpanContextValid = exports.isValidSpanId = exports.isValidTraceId = exports.INVALID_SPAN_CONTEXT = 
exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var NonRecordingSpan_1 = __webpack_require__(437); +exports.isSpanContextValid = exports.isValidSpanId = exports.isValidTraceId = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; var trace_flags_1 = __webpack_require__(975); var VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; var VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; @@ -28739,16 +29378,6 @@ function isSpanContextValid(spanContext) { return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); } exports.isSpanContextValid = isSpanContextValid; -/** - * Wrap the given {@link SpanContext} in a new non-recording {@link Span} - * - * @param spanContext span context to be wrapped - * @returns a new non-recording {@link Span} with the provided context - */ -function wrapSpanContext(spanContext) { - return new NonRecordingSpan_1.NonRecordingSpan(spanContext); -} -exports.wrapSpanContext = wrapSpanContext; //# sourceMappingURL=spancontext-utils.js.map /***/ }), @@ -37926,15 +38555,27 @@ module.exports = clean /* 508 */, /* 509 */, /* 510 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ (function(__unusedmodule, exports) { "use strict"; -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ Object.defineProperty(exports, "__esModule", { value: true }); -__webpack_require__(71); - +//# sourceMappingURL=Entry.js.map /***/ }), /* 511 */, @@ -38251,33 +38892,33 @@ exports.CompareCache = CompareCache; */ Object.defineProperty(exports, "__esModule", { value: true }); exports.unregisterGlobal = exports.getGlobal = exports.registerGlobal = void 0; +var __1 = __webpack_require__(440); var platform_1 = __webpack_require__(127); var version_1 = __webpack_require__(133); var semver_1 = __webpack_require__(987); var major = version_1.VERSION.split('.')[0]; -var GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for("opentelemetry.js.api." + major); +var GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for("io.opentelemetry.js.api." 
+ major); var _global = platform_1._globalThis; -function registerGlobal(type, instance, diag, allowOverride) { +function registerGlobal(type, instance, allowOverride) { var _a; if (allowOverride === void 0) { allowOverride = false; } - var api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : { + _global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : { version: version_1.VERSION, - }); + }; + var api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; if (!allowOverride && api[type]) { // already registered an API of this type var err = new Error("@opentelemetry/api: Attempted duplicate registration of API: " + type); - diag.error(err.stack || err.message); - return false; + __1.diag.error(err.stack || err.message); + return; } if (api.version !== version_1.VERSION) { // All registered APIs must be of the same version exactly var err = new Error('@opentelemetry/api: All API registration versions must match'); - diag.error(err.stack || err.message); - return false; + __1.diag.error(err.stack || err.message); + return; } api[type] = instance; - diag.debug("@opentelemetry/api: Registered a global for " + type + " v" + version_1.VERSION + "."); - return true; } exports.registerGlobal = registerGlobal; function getGlobal(type) { @@ -38289,8 +38930,7 @@ function getGlobal(type) { return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? void 0 : _b[type]; } exports.getGlobal = getGlobal; -function unregisterGlobal(type, diag) { - diag.debug("@opentelemetry/api: Unregistering a global for " + type + " v" + version_1.VERSION + "."); +function unregisterGlobal(type) { var api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; if (api) { delete api[type]; @@ -45749,7 +46389,76 @@ exports.EventTargetImpl = EventTargetImpl; /***/ }), /* 598 */, -/* 599 */, +/* 599 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BaggageImpl = void 0; +var BaggageImpl = /** @class */ (function () { + function BaggageImpl(entries) { + this._entries = entries ? 
new Map(entries) : new Map(); + } + BaggageImpl.prototype.getEntry = function (key) { + var entry = this._entries.get(key); + if (!entry) { + return undefined; + } + return Object.assign({}, entry); + }; + BaggageImpl.prototype.getAllEntries = function () { + return Array.from(this._entries.entries()).map(function (_a) { + var k = _a[0], v = _a[1]; + return [k, v]; + }); + }; + BaggageImpl.prototype.setEntry = function (key, entry) { + var newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.set(key, entry); + return newBaggage; + }; + BaggageImpl.prototype.removeEntry = function (key) { + var newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.delete(key); + return newBaggage; + }; + BaggageImpl.prototype.removeEntries = function () { + var keys = []; + for (var _i = 0; _i < arguments.length; _i++) { + keys[_i] = arguments[_i]; + } + var newBaggage = new BaggageImpl(this._entries); + for (var _a = 0, keys_1 = keys; _a < keys_1.length; _a++) { + var key = keys_1[_a]; + newBaggage._entries.delete(key); + } + return newBaggage; + }; + BaggageImpl.prototype.clear = function () { + return new BaggageImpl(); + }; + return BaggageImpl; +}()); +exports.BaggageImpl = BaggageImpl; +//# sourceMappingURL=baggage.js.map + +/***/ }), /* 600 */, /* 601 */ /***/ (function(__unusedmodule, exports, __webpack_require__) { @@ -47461,7 +48170,7 @@ exports.NodeListImpl = NodeListImpl; * limitations under the License. */ Object.defineProperty(exports, "__esModule", { value: true }); -exports.NoopTextMapPropagator = void 0; +exports.NOOP_TEXT_MAP_PROPAGATOR = exports.NoopTextMapPropagator = void 0; /** * No-op implementations of {@link TextMapPropagator}. */ @@ -47480,6 +48189,7 @@ var NoopTextMapPropagator = /** @class */ (function () { return NoopTextMapPropagator; }()); exports.NoopTextMapPropagator = NoopTextMapPropagator; +exports.NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator(); //# sourceMappingURL=NoopTextMapPropagator.js.map /***/ }), @@ -47789,7 +48499,7 @@ var __createBinding; ar[i] = from[i]; } } - return to.concat(ar || Array.prototype.slice.call(from)); + return to.concat(ar || from); }; __await = function (v) { @@ -51244,76 +51954,7 @@ exports.namespace_extractQName = namespace_extractQName; /***/ }), /* 665 */, -/* 666 */ -/***/ (function(__unusedmodule, exports) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.BaggageImpl = void 0; -var BaggageImpl = /** @class */ (function () { - function BaggageImpl(entries) { - this._entries = entries ? 
new Map(entries) : new Map(); - } - BaggageImpl.prototype.getEntry = function (key) { - var entry = this._entries.get(key); - if (!entry) { - return undefined; - } - return Object.assign({}, entry); - }; - BaggageImpl.prototype.getAllEntries = function () { - return Array.from(this._entries.entries()).map(function (_a) { - var k = _a[0], v = _a[1]; - return [k, v]; - }); - }; - BaggageImpl.prototype.setEntry = function (key, entry) { - var newBaggage = new BaggageImpl(this._entries); - newBaggage._entries.set(key, entry); - return newBaggage; - }; - BaggageImpl.prototype.removeEntry = function (key) { - var newBaggage = new BaggageImpl(this._entries); - newBaggage._entries.delete(key); - return newBaggage; - }; - BaggageImpl.prototype.removeEntries = function () { - var keys = []; - for (var _i = 0; _i < arguments.length; _i++) { - keys[_i] = arguments[_i]; - } - var newBaggage = new BaggageImpl(this._entries); - for (var _a = 0, keys_1 = keys; _a < keys_1.length; _a++) { - var key = keys_1[_a]; - newBaggage._entries.delete(key); - } - return newBaggage; - }; - BaggageImpl.prototype.clear = function () { - return new BaggageImpl(); - }; - return BaggageImpl; -}()); -exports.BaggageImpl = BaggageImpl; -//# sourceMappingURL=baggage-impl.js.map - -/***/ }), +/* 666 */, /* 667 */, /* 668 */, /* 669 */ @@ -54751,7 +55392,18 @@ exports.abort_signalAbort = abort_signalAbort; //# sourceMappingURL=AbortAlgorithm.js.map /***/ }), -/* 711 */, +/* 711 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +__webpack_require__(71); + + +/***/ }), /* 712 */, /* 713 */, /* 714 */ @@ -54893,9 +55545,343 @@ module.exports = new Schema({ /***/ }), -/* 725 */, +/* 725 */ +/***/ (function(module) { + +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? 
this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || from); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); +}); + + +/***/ }), /* 726 */, -/* 727 */, +/* 727 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Event.js.map + +/***/ }), /* 728 */ /***/ (function(__unusedmodule, exports) { @@ -54920,7 +55906,7 @@ exports.SearchState = SearchState; Object.defineProperty(exports, "__esModule", { value: true }); exports.getJavaDistribution = void 0; -const installer_1 = __webpack_require__(144); +const installer_1 = __webpack_require__(757); const installer_2 = __webpack_require__(834); const installer_3 = __webpack_require__(584); const installer_4 = __webpack_require__(439); @@ -55805,10 +56791,12 @@ module.exports = cmp * See the License for the specific language governing permissions and * limitations under the License. */ -var __spreadArray = (this && this.__spreadArray) || function (to, from) { - for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) - to[j] = from[i]; - return to; +var __spreadArrays = (this && this.__spreadArrays) || function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.NoopContextManager = void 0; @@ -55824,9 +56812,9 @@ var NoopContextManager = /** @class */ (function () { for (var _i = 3; _i < arguments.length; _i++) { args[_i - 3] = arguments[_i]; } - return fn.call.apply(fn, __spreadArray([thisArg], args)); + return fn.call.apply(fn, __spreadArrays([thisArg], args)); }; - NoopContextManager.prototype.bind = function (_context, target) { + NoopContextManager.prototype.bind = function (target, _context) { return target; }; NoopContextManager.prototype.enable = function () { @@ -55876,7 +56864,108 @@ exports.NoopContextManager = NoopContextManager; /***/ }), /* 756 */, -/* 757 */, +/* 757 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.LocalDistribution = void 0; +const tc = __importStar(__webpack_require__(139)); +const core = __importStar(__webpack_require__(470)); +const fs_1 = __importDefault(__webpack_require__(747)); +const path_1 = __importDefault(__webpack_require__(622)); +const base_installer_1 = __webpack_require__(83); +const util_1 = __webpack_require__(322); +const constants_1 = __webpack_require__(211); +class LocalDistribution extends base_installer_1.JavaBase { + constructor(installerOptions, jdkFile) { + super('jdkfile', installerOptions); + this.jdkFile = jdkFile; + } + setupJava() { + return __awaiter(this, void 0, void 0, function* () { + let foundJava = this.findInToolcache(); + if (foundJava) { + core.info(`Resolved Java ${foundJava.version} from tool-cache`); + } + else { + core.info(`Java ${this.version} was not found in tool-cache. Trying to unpack JDK file...`); + if (!this.jdkFile) { + throw new Error("'jdkFile' is not specified"); + } + const jdkFilePath = path_1.default.resolve(this.jdkFile); + const stats = fs_1.default.statSync(jdkFilePath); + if (!stats.isFile()) { + throw new Error(`JDK file was not found in path '${jdkFilePath}'`); + } + core.info(`Extracting Java from '${jdkFilePath}'`); + const extractedJavaPath = yield util_1.extractJdkFile(jdkFilePath); + const archiveName = fs_1.default.readdirSync(extractedJavaPath)[0]; + const archivePath = path_1.default.join(extractedJavaPath, archiveName); + const javaVersion = this.version; + let javaPath = yield tc.cacheDir(archivePath, this.toolcacheFolderName, this.getToolcacheVersionName(javaVersion), this.architecture); + // for different Java distributions, postfix can exist or not so need to check both cases + if (process.platform === 'darwin' && + fs_1.default.existsSync(path_1.default.join(javaPath, constants_1.MACOS_JAVA_CONTENT_POSTFIX))) { + javaPath = path_1.default.join(javaPath, constants_1.MACOS_JAVA_CONTENT_POSTFIX); + } + foundJava = { + version: javaVersion, + path: javaPath + }; + } + core.info(`Setting Java ${foundJava.version} as default`); + this.setJavaDefault(foundJava.version, foundJava.path); + return foundJava; + }); + } + findPackageForDownload(version) { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('This method should not be implemented in local file provider'); + }); + } + downloadTool(javaRelease) { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('This method should not be implemented in local file provider'); + }); + } +} +exports.LocalDistribution = LocalDistribution; + + +/***/ }), /* 758 */, /* 759 */, /* 760 */ @@ -56292,7 +57381,77 @@ module.exports = function(dst, src) { /***/ }), -/* 767 */, +/* 767 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopSpan = void 0; +var spancontext_utils_1 = __webpack_require__(453); +/** + * The NoopSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. + */ +var NoopSpan = /** @class */ (function () { + function NoopSpan(_spanContext) { + if (_spanContext === void 0) { _spanContext = spancontext_utils_1.INVALID_SPAN_CONTEXT; } + this._spanContext = _spanContext; + } + // Returns a SpanContext. + NoopSpan.prototype.context = function () { + return this._spanContext; + }; + // By default does nothing + NoopSpan.prototype.setAttribute = function (_key, _value) { + return this; + }; + // By default does nothing + NoopSpan.prototype.setAttributes = function (_attributes) { + return this; + }; + // By default does nothing + NoopSpan.prototype.addEvent = function (_name, _attributes) { + return this; + }; + // By default does nothing + NoopSpan.prototype.setStatus = function (_status) { + return this; + }; + // By default does nothing + NoopSpan.prototype.updateName = function (_name) { + return this; + }; + // By default does nothing + NoopSpan.prototype.end = function (_endTime) { }; + // isRecording always returns false for noopSpan. + NoopSpan.prototype.isRecording = function () { + return false; + }; + // By default does nothing + NoopSpan.prototype.recordException = function (_exception, _time) { }; + return NoopSpan; +}()); +exports.NoopSpan = NoopSpan; +//# sourceMappingURL=NoopSpan.js.map + +/***/ }), /* 768 */ /***/ (function(module, __unusedexports, __webpack_require__) { @@ -58362,7 +59521,74 @@ FormData.prototype.toString = function () { /***/ }), -/* 792 */, +/* 792 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.baggageEntryMetadataFromString = exports.createBaggage = void 0; +var baggage_1 = __webpack_require__(599); +var symbol_1 = __webpack_require__(561); +__exportStar(__webpack_require__(938), exports); +__exportStar(__webpack_require__(510), exports); +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +function createBaggage(entries) { + if (entries === void 0) { entries = {}; } + return new baggage_1.BaggageImpl(new Map(Object.entries(entries))); +} +exports.createBaggage = createBaggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +function baggageEntryMetadataFromString(str) { + if (typeof str !== 'string') { + // @TODO log diagnostic + str = ''; + } + return { + __TYPE__: symbol_1.baggageEntryMetadataSymbol, + toString: function () { + return str; + }, + }; +} +exports.baggageEntryMetadataFromString = baggageEntryMetadataFromString; +//# sourceMappingURL=index.js.map + +/***/ }), /* 793 */, /* 794 */ /***/ (function(module) { @@ -61758,7 +62984,7 @@ var __createBinding; ar[i] = from[i]; } } - return to.concat(ar || Array.prototype.slice.call(from)); + return to.concat(ar || from); }; __await = function (v) { @@ -62292,7 +63518,7 @@ var abortController = __webpack_require__(819); var os = __webpack_require__(87); var crypto = __webpack_require__(373); var stream = __webpack_require__(794); -__webpack_require__(510); +__webpack_require__(711); var coreLro = __webpack_require__(110); var events = __webpack_require__(614); var fs = __webpack_require__(747); @@ -62305,7 +63531,7 @@ var util = __webpack_require__(669); * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ -const BlobServiceProperties = { +var BlobServiceProperties = { serializedName: "BlobServiceProperties", xmlName: "StorageServiceProperties", type: { @@ -62377,7 +63603,7 @@ const BlobServiceProperties = { } } }; -const Logging = { +var Logging = { serializedName: "Logging", type: { name: "Composite", @@ -62426,7 +63652,7 @@ const Logging = { } } }; -const RetentionPolicy = { +var RetentionPolicy = { serializedName: "RetentionPolicy", type: { name: "Composite", @@ -62453,7 +63679,7 @@ const RetentionPolicy = { } } }; -const Metrics = { +var Metrics = { serializedName: "Metrics", type: { name: "Composite", @@ -62492,7 +63718,7 @@ const Metrics = { } } }; -const CorsRule = { +var CorsRule = { serializedName: "CorsRule", type: { name: "Composite", @@ -62544,7 +63770,7 @@ const CorsRule = { } } }; -const StaticWebsite = { +var StaticWebsite = { serializedName: "StaticWebsite", type: { name: "Composite", @@ -62582,7 +63808,7 @@ const StaticWebsite = { } } }; -const StorageError = { +var StorageError = { serializedName: "StorageError", type: { name: "Composite", @@ -62605,7 +63831,7 @@ const StorageError = { } } }; -const BlobServiceStatistics = { +var BlobServiceStatistics = { serializedName: "BlobServiceStatistics", xmlName: "StorageServiceStats", type: { @@ -62623,7 +63849,7 @@ const BlobServiceStatistics = { } } }; -const GeoReplication = { +var GeoReplication = { serializedName: "GeoReplication", type: { name: "Composite", @@ -62649,7 +63875,7 @@ const GeoReplication = { } } }; -const ListContainersSegmentResponse = { +var ListContainersSegmentResponse = { serializedName: "ListContainersSegmentResponse", xmlName: "EnumerationResults", type: { @@ -62712,7 +63938,7 @@ const ListContainersSegmentResponse = { } } }; -const ContainerItem = { +var ContainerItem = { serializedName: "ContainerItem", xmlName: "Container", type: { @@ -62760,7 +63986,7 @@ const ContainerItem = { } } }; -const ContainerProperties = { +var ContainerProperties = { serializedName: "ContainerProperties", type: { name: "Composite", @@ -62865,7 +64091,7 @@ const ContainerProperties = { } } }; -const KeyInfo = { +var KeyInfo = { serializedName: "KeyInfo", type: { name: "Composite", @@ -62890,7 +64116,7 @@ const KeyInfo = { } } }; -const UserDelegationKey = { +var UserDelegationKey = { serializedName: "UserDelegationKey", type: { name: "Composite", @@ -62955,7 +64181,7 @@ const UserDelegationKey = { } } }; -const FilterBlobSegment = { +var FilterBlobSegment = { serializedName: "FilterBlobSegment", xmlName: "EnumerationResults", type: { @@ -63005,7 +64231,7 @@ const FilterBlobSegment = { } } }; -const FilterBlobItem = { +var FilterBlobItem = { serializedName: "FilterBlobItem", xmlName: "Blob", type: { @@ -63039,7 +64265,7 @@ const FilterBlobItem = { } } }; -const BlobTags = { +var BlobTags = { serializedName: "BlobTags", xmlName: "Tags", type: { @@ -63065,7 +64291,7 @@ const BlobTags = { } } }; -const BlobTag = { +var BlobTag = { serializedName: "BlobTag", xmlName: "Tag", type: { @@ -63091,7 +64317,7 @@ const BlobTag = { } } }; -const SignedIdentifier = { +var SignedIdentifier = { serializedName: "SignedIdentifier", xmlName: "SignedIdentifier", type: { @@ -63117,7 +64343,7 @@ const SignedIdentifier = { } } }; -const AccessPolicy = { +var AccessPolicy = { serializedName: "AccessPolicy", type: { name: "Composite", @@ -63147,7 +64373,7 @@ const AccessPolicy = { } } }; -const ListBlobsFlatSegmentResponse = { +var ListBlobsFlatSegmentResponse = { serializedName: "ListBlobsFlatSegmentResponse", xmlName: "EnumerationResults", type: 
{ @@ -63211,7 +64437,7 @@ const ListBlobsFlatSegmentResponse = { } } }; -const BlobFlatListSegment = { +var BlobFlatListSegment = { serializedName: "BlobFlatListSegment", xmlName: "Blobs", type: { @@ -63236,7 +64462,7 @@ const BlobFlatListSegment = { } } }; -const BlobItemInternal = { +var BlobItemInternal = { serializedName: "BlobItemInternal", xmlName: "Blob", type: { @@ -63316,7 +64542,7 @@ const BlobItemInternal = { } } }; -const BlobPropertiesInternal = { +var BlobPropertiesInternal = { serializedName: "BlobPropertiesInternal", xmlName: "Properties", type: { @@ -63619,7 +64845,7 @@ const BlobPropertiesInternal = { } } }; -const ListBlobsHierarchySegmentResponse = { +var ListBlobsHierarchySegmentResponse = { serializedName: "ListBlobsHierarchySegmentResponse", xmlName: "EnumerationResults", type: { @@ -63690,7 +64916,7 @@ const ListBlobsHierarchySegmentResponse = { } } }; -const BlobHierarchyListSegment = { +var BlobHierarchyListSegment = { serializedName: "BlobHierarchyListSegment", xmlName: "Blobs", type: { @@ -63729,7 +64955,7 @@ const BlobHierarchyListSegment = { } } }; -const BlobPrefix = { +var BlobPrefix = { serializedName: "BlobPrefix", type: { name: "Composite", @@ -63746,7 +64972,7 @@ const BlobPrefix = { } } }; -const DataLakeStorageError = { +var DataLakeStorageError = { serializedName: "DataLakeStorageError", type: { name: "Composite", @@ -63763,7 +64989,7 @@ const DataLakeStorageError = { } } }; -const DataLakeStorageErrorError = { +var DataLakeStorageErrorError = { serializedName: "DataLakeStorageErrorError", type: { name: "Composite", @@ -63786,7 +65012,7 @@ const DataLakeStorageErrorError = { } } }; -const BlockLookupList = { +var BlockLookupList = { serializedName: "BlockLookupList", xmlName: "BlockList", type: { @@ -63835,7 +65061,7 @@ const BlockLookupList = { } } }; -const BlockList = { +var BlockList = { serializedName: "BlockList", type: { name: "Composite", @@ -63874,7 +65100,7 @@ const BlockList = { } } }; -const Block = { +var Block = { serializedName: "Block", type: { name: "Composite", @@ -63899,7 +65125,7 @@ const Block = { } } }; -const PageList = { +var PageList = { serializedName: "PageList", type: { name: "Composite", @@ -63936,7 +65162,7 @@ const PageList = { } } }; -const PageRange = { +var PageRange = { serializedName: "PageRange", xmlName: "PageRange", type: { @@ -63962,7 +65188,7 @@ const PageRange = { } } }; -const ClearRange = { +var ClearRange = { serializedName: "ClearRange", xmlName: "ClearRange", type: { @@ -63988,7 +65214,7 @@ const ClearRange = { } } }; -const QueryRequest = { +var QueryRequest = { serializedName: "QueryRequest", xmlName: "QueryRequest", type: { @@ -64030,7 +65256,7 @@ const QueryRequest = { } } }; -const QuerySerialization = { +var QuerySerialization = { serializedName: "QuerySerialization", type: { name: "Composite", @@ -64047,7 +65273,7 @@ const QuerySerialization = { } } }; -const QueryFormat = { +var QueryFormat = { serializedName: "QueryFormat", type: { name: "Composite", @@ -64088,7 +65314,7 @@ const QueryFormat = { } } }; -const DelimitedTextConfiguration = { +var DelimitedTextConfiguration = { serializedName: "DelimitedTextConfiguration", xmlName: "DelimitedTextConfiguration", type: { @@ -64138,7 +65364,7 @@ const DelimitedTextConfiguration = { } } }; -const JsonTextConfiguration = { +var JsonTextConfiguration = { serializedName: "JsonTextConfiguration", xmlName: "JsonTextConfiguration", type: { @@ -64156,7 +65382,7 @@ const JsonTextConfiguration = { } } }; -const ArrowConfiguration = { +var ArrowConfiguration = { 
serializedName: "ArrowConfiguration", xmlName: "ArrowConfiguration", type: { @@ -64182,7 +65408,7 @@ const ArrowConfiguration = { } } }; -const ArrowField = { +var ArrowField = { serializedName: "ArrowField", xmlName: "Field", type: { @@ -64221,7 +65447,7 @@ const ArrowField = { } } }; -const ServiceSetPropertiesHeaders = { +var ServiceSetPropertiesHeaders = { serializedName: "Service_setPropertiesHeaders", type: { name: "Composite", @@ -64258,7 +65484,7 @@ const ServiceSetPropertiesHeaders = { } } }; -const ServiceSetPropertiesExceptionHeaders = { +var ServiceSetPropertiesExceptionHeaders = { serializedName: "Service_setPropertiesExceptionHeaders", type: { name: "Composite", @@ -64274,7 +65500,7 @@ const ServiceSetPropertiesExceptionHeaders = { } } }; -const ServiceGetPropertiesHeaders = { +var ServiceGetPropertiesHeaders = { serializedName: "Service_getPropertiesHeaders", type: { name: "Composite", @@ -64311,7 +65537,7 @@ const ServiceGetPropertiesHeaders = { } } }; -const ServiceGetPropertiesExceptionHeaders = { +var ServiceGetPropertiesExceptionHeaders = { serializedName: "Service_getPropertiesExceptionHeaders", type: { name: "Composite", @@ -64327,7 +65553,7 @@ const ServiceGetPropertiesExceptionHeaders = { } } }; -const ServiceGetStatisticsHeaders = { +var ServiceGetStatisticsHeaders = { serializedName: "Service_getStatisticsHeaders", type: { name: "Composite", @@ -64371,7 +65597,7 @@ const ServiceGetStatisticsHeaders = { } } }; -const ServiceGetStatisticsExceptionHeaders = { +var ServiceGetStatisticsExceptionHeaders = { serializedName: "Service_getStatisticsExceptionHeaders", type: { name: "Composite", @@ -64387,7 +65613,7 @@ const ServiceGetStatisticsExceptionHeaders = { } } }; -const ServiceListContainersSegmentHeaders = { +var ServiceListContainersSegmentHeaders = { serializedName: "Service_listContainersSegmentHeaders", type: { name: "Composite", @@ -64424,7 +65650,7 @@ const ServiceListContainersSegmentHeaders = { } } }; -const ServiceListContainersSegmentExceptionHeaders = { +var ServiceListContainersSegmentExceptionHeaders = { serializedName: "Service_listContainersSegmentExceptionHeaders", type: { name: "Composite", @@ -64440,7 +65666,7 @@ const ServiceListContainersSegmentExceptionHeaders = { } } }; -const ServiceGetUserDelegationKeyHeaders = { +var ServiceGetUserDelegationKeyHeaders = { serializedName: "Service_getUserDelegationKeyHeaders", type: { name: "Composite", @@ -64484,7 +65710,7 @@ const ServiceGetUserDelegationKeyHeaders = { } } }; -const ServiceGetUserDelegationKeyExceptionHeaders = { +var ServiceGetUserDelegationKeyExceptionHeaders = { serializedName: "Service_getUserDelegationKeyExceptionHeaders", type: { name: "Composite", @@ -64500,7 +65726,7 @@ const ServiceGetUserDelegationKeyExceptionHeaders = { } } }; -const ServiceGetAccountInfoHeaders = { +var ServiceGetAccountInfoHeaders = { serializedName: "Service_getAccountInfoHeaders", type: { name: "Composite", @@ -64579,7 +65805,7 @@ const ServiceGetAccountInfoHeaders = { } } }; -const ServiceGetAccountInfoExceptionHeaders = { +var ServiceGetAccountInfoExceptionHeaders = { serializedName: "Service_getAccountInfoExceptionHeaders", type: { name: "Composite", @@ -64595,7 +65821,7 @@ const ServiceGetAccountInfoExceptionHeaders = { } } }; -const ServiceSubmitBatchHeaders = { +var ServiceSubmitBatchHeaders = { serializedName: "Service_submitBatchHeaders", type: { name: "Composite", @@ -64639,7 +65865,7 @@ const ServiceSubmitBatchHeaders = { } } }; -const ServiceSubmitBatchExceptionHeaders = { +var 
ServiceSubmitBatchExceptionHeaders = { serializedName: "Service_submitBatchExceptionHeaders", type: { name: "Composite", @@ -64655,7 +65881,7 @@ const ServiceSubmitBatchExceptionHeaders = { } } }; -const ServiceFilterBlobsHeaders = { +var ServiceFilterBlobsHeaders = { serializedName: "Service_filterBlobsHeaders", type: { name: "Composite", @@ -64699,7 +65925,7 @@ const ServiceFilterBlobsHeaders = { } } }; -const ServiceFilterBlobsExceptionHeaders = { +var ServiceFilterBlobsExceptionHeaders = { serializedName: "Service_filterBlobsExceptionHeaders", type: { name: "Composite", @@ -64715,7 +65941,7 @@ const ServiceFilterBlobsExceptionHeaders = { } } }; -const ContainerCreateHeaders = { +var ContainerCreateHeaders = { serializedName: "Container_createHeaders", type: { name: "Composite", @@ -64773,7 +65999,7 @@ const ContainerCreateHeaders = { } } }; -const ContainerCreateExceptionHeaders = { +var ContainerCreateExceptionHeaders = { serializedName: "Container_createExceptionHeaders", type: { name: "Composite", @@ -64789,7 +66015,7 @@ const ContainerCreateExceptionHeaders = { } } }; -const ContainerGetPropertiesHeaders = { +var ContainerGetPropertiesHeaders = { serializedName: "Container_getPropertiesHeaders", type: { name: "Composite", @@ -64922,7 +66148,7 @@ const ContainerGetPropertiesHeaders = { } } }; -const ContainerGetPropertiesExceptionHeaders = { +var ContainerGetPropertiesExceptionHeaders = { serializedName: "Container_getPropertiesExceptionHeaders", type: { name: "Composite", @@ -64938,7 +66164,7 @@ const ContainerGetPropertiesExceptionHeaders = { } } }; -const ContainerDeleteHeaders = { +var ContainerDeleteHeaders = { serializedName: "Container_deleteHeaders", type: { name: "Composite", @@ -64982,7 +66208,7 @@ const ContainerDeleteHeaders = { } } }; -const ContainerDeleteExceptionHeaders = { +var ContainerDeleteExceptionHeaders = { serializedName: "Container_deleteExceptionHeaders", type: { name: "Composite", @@ -64998,7 +66224,7 @@ const ContainerDeleteExceptionHeaders = { } } }; -const ContainerSetMetadataHeaders = { +var ContainerSetMetadataHeaders = { serializedName: "Container_setMetadataHeaders", type: { name: "Composite", @@ -65056,7 +66282,7 @@ const ContainerSetMetadataHeaders = { } } }; -const ContainerSetMetadataExceptionHeaders = { +var ContainerSetMetadataExceptionHeaders = { serializedName: "Container_setMetadataExceptionHeaders", type: { name: "Composite", @@ -65072,7 +66298,7 @@ const ContainerSetMetadataExceptionHeaders = { } } }; -const ContainerGetAccessPolicyHeaders = { +var ContainerGetAccessPolicyHeaders = { serializedName: "Container_getAccessPolicyHeaders", type: { name: "Composite", @@ -65138,7 +66364,7 @@ const ContainerGetAccessPolicyHeaders = { } } }; -const ContainerGetAccessPolicyExceptionHeaders = { +var ContainerGetAccessPolicyExceptionHeaders = { serializedName: "Container_getAccessPolicyExceptionHeaders", type: { name: "Composite", @@ -65154,7 +66380,7 @@ const ContainerGetAccessPolicyExceptionHeaders = { } } }; -const ContainerSetAccessPolicyHeaders = { +var ContainerSetAccessPolicyHeaders = { serializedName: "Container_setAccessPolicyHeaders", type: { name: "Composite", @@ -65212,7 +66438,7 @@ const ContainerSetAccessPolicyHeaders = { } } }; -const ContainerSetAccessPolicyExceptionHeaders = { +var ContainerSetAccessPolicyExceptionHeaders = { serializedName: "Container_setAccessPolicyExceptionHeaders", type: { name: "Composite", @@ -65228,7 +66454,7 @@ const ContainerSetAccessPolicyExceptionHeaders = { } } }; -const ContainerRestoreHeaders = { +var 
ContainerRestoreHeaders = { serializedName: "Container_restoreHeaders", type: { name: "Composite", @@ -65272,7 +66498,7 @@ const ContainerRestoreHeaders = { } } }; -const ContainerRestoreExceptionHeaders = { +var ContainerRestoreExceptionHeaders = { serializedName: "Container_restoreExceptionHeaders", type: { name: "Composite", @@ -65288,7 +66514,7 @@ const ContainerRestoreExceptionHeaders = { } } }; -const ContainerRenameHeaders = { +var ContainerRenameHeaders = { serializedName: "Container_renameHeaders", type: { name: "Composite", @@ -65332,7 +66558,7 @@ const ContainerRenameHeaders = { } } }; -const ContainerRenameExceptionHeaders = { +var ContainerRenameExceptionHeaders = { serializedName: "Container_renameExceptionHeaders", type: { name: "Composite", @@ -65348,7 +66574,7 @@ const ContainerRenameExceptionHeaders = { } } }; -const ContainerSubmitBatchHeaders = { +var ContainerSubmitBatchHeaders = { serializedName: "Container_submitBatchHeaders", type: { name: "Composite", @@ -65378,7 +66604,7 @@ const ContainerSubmitBatchHeaders = { } } }; -const ContainerSubmitBatchExceptionHeaders = { +var ContainerSubmitBatchExceptionHeaders = { serializedName: "Container_submitBatchExceptionHeaders", type: { name: "Composite", @@ -65394,7 +66620,7 @@ const ContainerSubmitBatchExceptionHeaders = { } } }; -const ContainerAcquireLeaseHeaders = { +var ContainerAcquireLeaseHeaders = { serializedName: "Container_acquireLeaseHeaders", type: { name: "Composite", @@ -65452,7 +66678,7 @@ const ContainerAcquireLeaseHeaders = { } } }; -const ContainerAcquireLeaseExceptionHeaders = { +var ContainerAcquireLeaseExceptionHeaders = { serializedName: "Container_acquireLeaseExceptionHeaders", type: { name: "Composite", @@ -65468,7 +66694,7 @@ const ContainerAcquireLeaseExceptionHeaders = { } } }; -const ContainerReleaseLeaseHeaders = { +var ContainerReleaseLeaseHeaders = { serializedName: "Container_releaseLeaseHeaders", type: { name: "Composite", @@ -65519,7 +66745,7 @@ const ContainerReleaseLeaseHeaders = { } } }; -const ContainerReleaseLeaseExceptionHeaders = { +var ContainerReleaseLeaseExceptionHeaders = { serializedName: "Container_releaseLeaseExceptionHeaders", type: { name: "Composite", @@ -65535,7 +66761,7 @@ const ContainerReleaseLeaseExceptionHeaders = { } } }; -const ContainerRenewLeaseHeaders = { +var ContainerRenewLeaseHeaders = { serializedName: "Container_renewLeaseHeaders", type: { name: "Composite", @@ -65593,7 +66819,7 @@ const ContainerRenewLeaseHeaders = { } } }; -const ContainerRenewLeaseExceptionHeaders = { +var ContainerRenewLeaseExceptionHeaders = { serializedName: "Container_renewLeaseExceptionHeaders", type: { name: "Composite", @@ -65609,7 +66835,7 @@ const ContainerRenewLeaseExceptionHeaders = { } } }; -const ContainerBreakLeaseHeaders = { +var ContainerBreakLeaseHeaders = { serializedName: "Container_breakLeaseHeaders", type: { name: "Composite", @@ -65667,7 +66893,7 @@ const ContainerBreakLeaseHeaders = { } } }; -const ContainerBreakLeaseExceptionHeaders = { +var ContainerBreakLeaseExceptionHeaders = { serializedName: "Container_breakLeaseExceptionHeaders", type: { name: "Composite", @@ -65683,7 +66909,7 @@ const ContainerBreakLeaseExceptionHeaders = { } } }; -const ContainerChangeLeaseHeaders = { +var ContainerChangeLeaseHeaders = { serializedName: "Container_changeLeaseHeaders", type: { name: "Composite", @@ -65741,7 +66967,7 @@ const ContainerChangeLeaseHeaders = { } } }; -const ContainerChangeLeaseExceptionHeaders = { +var ContainerChangeLeaseExceptionHeaders = { serializedName: 
"Container_changeLeaseExceptionHeaders", type: { name: "Composite", @@ -65757,7 +66983,7 @@ const ContainerChangeLeaseExceptionHeaders = { } } }; -const ContainerListBlobFlatSegmentHeaders = { +var ContainerListBlobFlatSegmentHeaders = { serializedName: "Container_listBlobFlatSegmentHeaders", type: { name: "Composite", @@ -65808,7 +67034,7 @@ const ContainerListBlobFlatSegmentHeaders = { } } }; -const ContainerListBlobFlatSegmentExceptionHeaders = { +var ContainerListBlobFlatSegmentExceptionHeaders = { serializedName: "Container_listBlobFlatSegmentExceptionHeaders", type: { name: "Composite", @@ -65824,7 +67050,7 @@ const ContainerListBlobFlatSegmentExceptionHeaders = { } } }; -const ContainerListBlobHierarchySegmentHeaders = { +var ContainerListBlobHierarchySegmentHeaders = { serializedName: "Container_listBlobHierarchySegmentHeaders", type: { name: "Composite", @@ -65875,7 +67101,7 @@ const ContainerListBlobHierarchySegmentHeaders = { } } }; -const ContainerListBlobHierarchySegmentExceptionHeaders = { +var ContainerListBlobHierarchySegmentExceptionHeaders = { serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", type: { name: "Composite", @@ -65891,7 +67117,7 @@ const ContainerListBlobHierarchySegmentExceptionHeaders = { } } }; -const ContainerGetAccountInfoHeaders = { +var ContainerGetAccountInfoHeaders = { serializedName: "Container_getAccountInfoHeaders", type: { name: "Composite", @@ -65956,7 +67182,7 @@ const ContainerGetAccountInfoHeaders = { } } }; -const ContainerGetAccountInfoExceptionHeaders = { +var ContainerGetAccountInfoExceptionHeaders = { serializedName: "Container_getAccountInfoExceptionHeaders", type: { name: "Composite", @@ -65972,7 +67198,7 @@ const ContainerGetAccountInfoExceptionHeaders = { } } }; -const DirectoryCreateHeaders = { +var DirectoryCreateHeaders = { serializedName: "Directory_createHeaders", type: { name: "Composite", @@ -66030,7 +67256,7 @@ const DirectoryCreateHeaders = { } } }; -const DirectoryCreateExceptionHeaders = { +var DirectoryCreateExceptionHeaders = { serializedName: "Directory_createExceptionHeaders", type: { name: "Composite", @@ -66060,7 +67286,7 @@ const DirectoryCreateExceptionHeaders = { } } }; -const DirectoryRenameHeaders = { +var DirectoryRenameHeaders = { serializedName: "Directory_renameHeaders", type: { name: "Composite", @@ -66125,7 +67351,7 @@ const DirectoryRenameHeaders = { } } }; -const DirectoryRenameExceptionHeaders = { +var DirectoryRenameExceptionHeaders = { serializedName: "Directory_renameExceptionHeaders", type: { name: "Composite", @@ -66155,7 +67381,7 @@ const DirectoryRenameExceptionHeaders = { } } }; -const DirectoryDeleteHeaders = { +var DirectoryDeleteHeaders = { serializedName: "Directory_deleteHeaders", type: { name: "Composite", @@ -66199,7 +67425,7 @@ const DirectoryDeleteHeaders = { } } }; -const DirectoryDeleteExceptionHeaders = { +var DirectoryDeleteExceptionHeaders = { serializedName: "Directory_deleteExceptionHeaders", type: { name: "Composite", @@ -66229,7 +67455,7 @@ const DirectoryDeleteExceptionHeaders = { } } }; -const DirectorySetAccessControlHeaders = { +var DirectorySetAccessControlHeaders = { serializedName: "Directory_setAccessControlHeaders", type: { name: "Composite", @@ -66273,7 +67499,7 @@ const DirectorySetAccessControlHeaders = { } } }; -const DirectorySetAccessControlExceptionHeaders = { +var DirectorySetAccessControlExceptionHeaders = { serializedName: "Directory_setAccessControlExceptionHeaders", type: { name: "Composite", @@ -66303,7 +67529,7 @@ const 
DirectorySetAccessControlExceptionHeaders = { } } }; -const DirectoryGetAccessControlHeaders = { +var DirectoryGetAccessControlHeaders = { serializedName: "Directory_getAccessControlHeaders", type: { name: "Composite", @@ -66375,7 +67601,7 @@ const DirectoryGetAccessControlHeaders = { } } }; -const DirectoryGetAccessControlExceptionHeaders = { +var DirectoryGetAccessControlExceptionHeaders = { serializedName: "Directory_getAccessControlExceptionHeaders", type: { name: "Composite", @@ -66405,7 +67631,7 @@ const DirectoryGetAccessControlExceptionHeaders = { } } }; -const BlobDownloadHeaders = { +var BlobDownloadHeaders = { serializedName: "Blob_downloadHeaders", type: { name: "Composite", @@ -66716,7 +67942,7 @@ const BlobDownloadHeaders = { } } }; -const BlobDownloadExceptionHeaders = { +var BlobDownloadExceptionHeaders = { serializedName: "Blob_downloadExceptionHeaders", type: { name: "Composite", @@ -66732,7 +67958,7 @@ const BlobDownloadExceptionHeaders = { } } }; -const BlobGetPropertiesHeaders = { +var BlobGetPropertiesHeaders = { serializedName: "Blob_getPropertiesHeaders", type: { name: "Composite", @@ -67086,7 +68312,7 @@ const BlobGetPropertiesHeaders = { } } }; -const BlobGetPropertiesExceptionHeaders = { +var BlobGetPropertiesExceptionHeaders = { serializedName: "Blob_getPropertiesExceptionHeaders", type: { name: "Composite", @@ -67102,7 +68328,7 @@ const BlobGetPropertiesExceptionHeaders = { } } }; -const BlobDeleteHeaders = { +var BlobDeleteHeaders = { serializedName: "Blob_deleteHeaders", type: { name: "Composite", @@ -67146,7 +68372,7 @@ const BlobDeleteHeaders = { } } }; -const BlobDeleteExceptionHeaders = { +var BlobDeleteExceptionHeaders = { serializedName: "Blob_deleteExceptionHeaders", type: { name: "Composite", @@ -67162,7 +68388,7 @@ const BlobDeleteExceptionHeaders = { } } }; -const BlobSetAccessControlHeaders = { +var BlobSetAccessControlHeaders = { serializedName: "Blob_setAccessControlHeaders", type: { name: "Composite", @@ -67206,7 +68432,7 @@ const BlobSetAccessControlHeaders = { } } }; -const BlobSetAccessControlExceptionHeaders = { +var BlobSetAccessControlExceptionHeaders = { serializedName: "Blob_setAccessControlExceptionHeaders", type: { name: "Composite", @@ -67236,7 +68462,7 @@ const BlobSetAccessControlExceptionHeaders = { } } }; -const BlobGetAccessControlHeaders = { +var BlobGetAccessControlHeaders = { serializedName: "Blob_getAccessControlHeaders", type: { name: "Composite", @@ -67308,7 +68534,7 @@ const BlobGetAccessControlHeaders = { } } }; -const BlobGetAccessControlExceptionHeaders = { +var BlobGetAccessControlExceptionHeaders = { serializedName: "Blob_getAccessControlExceptionHeaders", type: { name: "Composite", @@ -67338,7 +68564,7 @@ const BlobGetAccessControlExceptionHeaders = { } } }; -const BlobRenameHeaders = { +var BlobRenameHeaders = { serializedName: "Blob_renameHeaders", type: { name: "Composite", @@ -67396,7 +68622,7 @@ const BlobRenameHeaders = { } } }; -const BlobRenameExceptionHeaders = { +var BlobRenameExceptionHeaders = { serializedName: "Blob_renameExceptionHeaders", type: { name: "Composite", @@ -67426,7 +68652,7 @@ const BlobRenameExceptionHeaders = { } } }; -const BlobUndeleteHeaders = { +var BlobUndeleteHeaders = { serializedName: "Blob_undeleteHeaders", type: { name: "Composite", @@ -67470,7 +68696,7 @@ const BlobUndeleteHeaders = { } } }; -const BlobUndeleteExceptionHeaders = { +var BlobUndeleteExceptionHeaders = { serializedName: "Blob_undeleteExceptionHeaders", type: { name: "Composite", @@ -67486,7 +68712,7 @@ const 
BlobUndeleteExceptionHeaders = { } } }; -const BlobSetExpiryHeaders = { +var BlobSetExpiryHeaders = { serializedName: "Blob_setExpiryHeaders", type: { name: "Composite", @@ -67537,7 +68763,7 @@ const BlobSetExpiryHeaders = { } } }; -const BlobSetExpiryExceptionHeaders = { +var BlobSetExpiryExceptionHeaders = { serializedName: "Blob_setExpiryExceptionHeaders", type: { name: "Composite", @@ -67553,7 +68779,7 @@ const BlobSetExpiryExceptionHeaders = { } } }; -const BlobSetHttpHeadersHeaders = { +var BlobSetHttpHeadersHeaders = { serializedName: "Blob_setHttpHeadersHeaders", type: { name: "Composite", @@ -67618,7 +68844,7 @@ const BlobSetHttpHeadersHeaders = { } } }; -const BlobSetHttpHeadersExceptionHeaders = { +var BlobSetHttpHeadersExceptionHeaders = { serializedName: "Blob_setHttpHeadersExceptionHeaders", type: { name: "Composite", @@ -67634,7 +68860,7 @@ const BlobSetHttpHeadersExceptionHeaders = { } } }; -const BlobSetMetadataHeaders = { +var BlobSetMetadataHeaders = { serializedName: "Blob_setMetadataHeaders", type: { name: "Composite", @@ -67720,7 +68946,7 @@ const BlobSetMetadataHeaders = { } } }; -const BlobSetMetadataExceptionHeaders = { +var BlobSetMetadataExceptionHeaders = { serializedName: "Blob_setMetadataExceptionHeaders", type: { name: "Composite", @@ -67736,7 +68962,7 @@ const BlobSetMetadataExceptionHeaders = { } } }; -const BlobAcquireLeaseHeaders = { +var BlobAcquireLeaseHeaders = { serializedName: "Blob_acquireLeaseHeaders", type: { name: "Composite", @@ -67794,7 +69020,7 @@ const BlobAcquireLeaseHeaders = { } } }; -const BlobAcquireLeaseExceptionHeaders = { +var BlobAcquireLeaseExceptionHeaders = { serializedName: "Blob_acquireLeaseExceptionHeaders", type: { name: "Composite", @@ -67810,7 +69036,7 @@ const BlobAcquireLeaseExceptionHeaders = { } } }; -const BlobReleaseLeaseHeaders = { +var BlobReleaseLeaseHeaders = { serializedName: "Blob_releaseLeaseHeaders", type: { name: "Composite", @@ -67861,7 +69087,7 @@ const BlobReleaseLeaseHeaders = { } } }; -const BlobReleaseLeaseExceptionHeaders = { +var BlobReleaseLeaseExceptionHeaders = { serializedName: "Blob_releaseLeaseExceptionHeaders", type: { name: "Composite", @@ -67877,7 +69103,7 @@ const BlobReleaseLeaseExceptionHeaders = { } } }; -const BlobRenewLeaseHeaders = { +var BlobRenewLeaseHeaders = { serializedName: "Blob_renewLeaseHeaders", type: { name: "Composite", @@ -67935,7 +69161,7 @@ const BlobRenewLeaseHeaders = { } } }; -const BlobRenewLeaseExceptionHeaders = { +var BlobRenewLeaseExceptionHeaders = { serializedName: "Blob_renewLeaseExceptionHeaders", type: { name: "Composite", @@ -67951,7 +69177,7 @@ const BlobRenewLeaseExceptionHeaders = { } } }; -const BlobChangeLeaseHeaders = { +var BlobChangeLeaseHeaders = { serializedName: "Blob_changeLeaseHeaders", type: { name: "Composite", @@ -68009,7 +69235,7 @@ const BlobChangeLeaseHeaders = { } } }; -const BlobChangeLeaseExceptionHeaders = { +var BlobChangeLeaseExceptionHeaders = { serializedName: "Blob_changeLeaseExceptionHeaders", type: { name: "Composite", @@ -68025,7 +69251,7 @@ const BlobChangeLeaseExceptionHeaders = { } } }; -const BlobBreakLeaseHeaders = { +var BlobBreakLeaseHeaders = { serializedName: "Blob_breakLeaseHeaders", type: { name: "Composite", @@ -68083,7 +69309,7 @@ const BlobBreakLeaseHeaders = { } } }; -const BlobBreakLeaseExceptionHeaders = { +var BlobBreakLeaseExceptionHeaders = { serializedName: "Blob_breakLeaseExceptionHeaders", type: { name: "Composite", @@ -68099,7 +69325,7 @@ const BlobBreakLeaseExceptionHeaders = { } } }; -const 
BlobCreateSnapshotHeaders = { +var BlobCreateSnapshotHeaders = { serializedName: "Blob_createSnapshotHeaders", type: { name: "Composite", @@ -68178,7 +69404,7 @@ const BlobCreateSnapshotHeaders = { } } }; -const BlobCreateSnapshotExceptionHeaders = { +var BlobCreateSnapshotExceptionHeaders = { serializedName: "Blob_createSnapshotExceptionHeaders", type: { name: "Composite", @@ -68194,7 +69420,7 @@ const BlobCreateSnapshotExceptionHeaders = { } } }; -const BlobStartCopyFromURLHeaders = { +var BlobStartCopyFromURLHeaders = { serializedName: "Blob_startCopyFromURLHeaders", type: { name: "Composite", @@ -68274,7 +69500,7 @@ const BlobStartCopyFromURLHeaders = { } } }; -const BlobStartCopyFromURLExceptionHeaders = { +var BlobStartCopyFromURLExceptionHeaders = { serializedName: "Blob_startCopyFromURLExceptionHeaders", type: { name: "Composite", @@ -68290,7 +69516,7 @@ const BlobStartCopyFromURLExceptionHeaders = { } } }; -const BlobCopyFromURLHeaders = { +var BlobCopyFromURLHeaders = { serializedName: "Blob_copyFromURLHeaders", type: { name: "Composite", @@ -68384,7 +69610,7 @@ const BlobCopyFromURLHeaders = { } } }; -const BlobCopyFromURLExceptionHeaders = { +var BlobCopyFromURLExceptionHeaders = { serializedName: "Blob_copyFromURLExceptionHeaders", type: { name: "Composite", @@ -68400,7 +69626,7 @@ const BlobCopyFromURLExceptionHeaders = { } } }; -const BlobAbortCopyFromURLHeaders = { +var BlobAbortCopyFromURLHeaders = { serializedName: "Blob_abortCopyFromURLHeaders", type: { name: "Composite", @@ -68444,7 +69670,7 @@ const BlobAbortCopyFromURLHeaders = { } } }; -const BlobAbortCopyFromURLExceptionHeaders = { +var BlobAbortCopyFromURLExceptionHeaders = { serializedName: "Blob_abortCopyFromURLExceptionHeaders", type: { name: "Composite", @@ -68460,7 +69686,7 @@ const BlobAbortCopyFromURLExceptionHeaders = { } } }; -const BlobSetTierHeaders = { +var BlobSetTierHeaders = { serializedName: "Blob_setTierHeaders", type: { name: "Composite", @@ -68497,7 +69723,7 @@ const BlobSetTierHeaders = { } } }; -const BlobSetTierExceptionHeaders = { +var BlobSetTierExceptionHeaders = { serializedName: "Blob_setTierExceptionHeaders", type: { name: "Composite", @@ -68513,7 +69739,7 @@ const BlobSetTierExceptionHeaders = { } } }; -const BlobGetAccountInfoHeaders = { +var BlobGetAccountInfoHeaders = { serializedName: "Blob_getAccountInfoHeaders", type: { name: "Composite", @@ -68578,7 +69804,7 @@ const BlobGetAccountInfoHeaders = { } } }; -const BlobGetAccountInfoExceptionHeaders = { +var BlobGetAccountInfoExceptionHeaders = { serializedName: "Blob_getAccountInfoExceptionHeaders", type: { name: "Composite", @@ -68594,7 +69820,7 @@ const BlobGetAccountInfoExceptionHeaders = { } } }; -const BlobQueryHeaders = { +var BlobQueryHeaders = { serializedName: "Blob_queryHeaders", type: { name: "Composite", @@ -68853,7 +70079,7 @@ const BlobQueryHeaders = { } } }; -const BlobQueryExceptionHeaders = { +var BlobQueryExceptionHeaders = { serializedName: "Blob_queryExceptionHeaders", type: { name: "Composite", @@ -68869,7 +70095,7 @@ const BlobQueryExceptionHeaders = { } } }; -const BlobGetTagsHeaders = { +var BlobGetTagsHeaders = { serializedName: "Blob_getTagsHeaders", type: { name: "Composite", @@ -68913,7 +70139,7 @@ const BlobGetTagsHeaders = { } } }; -const BlobGetTagsExceptionHeaders = { +var BlobGetTagsExceptionHeaders = { serializedName: "Blob_getTagsExceptionHeaders", type: { name: "Composite", @@ -68929,7 +70155,7 @@ const BlobGetTagsExceptionHeaders = { } } }; -const BlobSetTagsHeaders = { +var BlobSetTagsHeaders = { 
serializedName: "Blob_setTagsHeaders", type: { name: "Composite", @@ -68973,7 +70199,7 @@ const BlobSetTagsHeaders = { } } }; -const BlobSetTagsExceptionHeaders = { +var BlobSetTagsExceptionHeaders = { serializedName: "Blob_setTagsExceptionHeaders", type: { name: "Composite", @@ -68989,7 +70215,7 @@ const BlobSetTagsExceptionHeaders = { } } }; -const PageBlobCreateHeaders = { +var PageBlobCreateHeaders = { serializedName: "PageBlob_createHeaders", type: { name: "Composite", @@ -69082,7 +70308,7 @@ const PageBlobCreateHeaders = { } } }; -const PageBlobCreateExceptionHeaders = { +var PageBlobCreateExceptionHeaders = { serializedName: "PageBlob_createExceptionHeaders", type: { name: "Composite", @@ -69098,7 +70324,7 @@ const PageBlobCreateExceptionHeaders = { } } }; -const PageBlobUploadPagesHeaders = { +var PageBlobUploadPagesHeaders = { serializedName: "PageBlob_uploadPagesHeaders", type: { name: "Composite", @@ -69198,7 +70424,7 @@ const PageBlobUploadPagesHeaders = { } } }; -const PageBlobUploadPagesExceptionHeaders = { +var PageBlobUploadPagesExceptionHeaders = { serializedName: "PageBlob_uploadPagesExceptionHeaders", type: { name: "Composite", @@ -69214,7 +70440,7 @@ const PageBlobUploadPagesExceptionHeaders = { } } }; -const PageBlobClearPagesHeaders = { +var PageBlobClearPagesHeaders = { serializedName: "PageBlob_clearPagesHeaders", type: { name: "Composite", @@ -69293,7 +70519,7 @@ const PageBlobClearPagesHeaders = { } } }; -const PageBlobClearPagesExceptionHeaders = { +var PageBlobClearPagesExceptionHeaders = { serializedName: "PageBlob_clearPagesExceptionHeaders", type: { name: "Composite", @@ -69309,7 +70535,7 @@ const PageBlobClearPagesExceptionHeaders = { } } }; -const PageBlobUploadPagesFromURLHeaders = { +var PageBlobUploadPagesFromURLHeaders = { serializedName: "PageBlob_uploadPagesFromURLHeaders", type: { name: "Composite", @@ -69402,7 +70628,7 @@ const PageBlobUploadPagesFromURLHeaders = { } } }; -const PageBlobUploadPagesFromURLExceptionHeaders = { +var PageBlobUploadPagesFromURLExceptionHeaders = { serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", type: { name: "Composite", @@ -69418,7 +70644,7 @@ const PageBlobUploadPagesFromURLExceptionHeaders = { } } }; -const PageBlobGetPageRangesHeaders = { +var PageBlobGetPageRangesHeaders = { serializedName: "PageBlob_getPageRangesHeaders", type: { name: "Composite", @@ -69483,7 +70709,7 @@ const PageBlobGetPageRangesHeaders = { } } }; -const PageBlobGetPageRangesExceptionHeaders = { +var PageBlobGetPageRangesExceptionHeaders = { serializedName: "PageBlob_getPageRangesExceptionHeaders", type: { name: "Composite", @@ -69499,7 +70725,7 @@ const PageBlobGetPageRangesExceptionHeaders = { } } }; -const PageBlobGetPageRangesDiffHeaders = { +var PageBlobGetPageRangesDiffHeaders = { serializedName: "PageBlob_getPageRangesDiffHeaders", type: { name: "Composite", @@ -69564,7 +70790,7 @@ const PageBlobGetPageRangesDiffHeaders = { } } }; -const PageBlobGetPageRangesDiffExceptionHeaders = { +var PageBlobGetPageRangesDiffExceptionHeaders = { serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", type: { name: "Composite", @@ -69580,7 +70806,7 @@ const PageBlobGetPageRangesDiffExceptionHeaders = { } } }; -const PageBlobResizeHeaders = { +var PageBlobResizeHeaders = { serializedName: "PageBlob_resizeHeaders", type: { name: "Composite", @@ -69645,7 +70871,7 @@ const PageBlobResizeHeaders = { } } }; -const PageBlobResizeExceptionHeaders = { +var PageBlobResizeExceptionHeaders = { serializedName: "PageBlob_resizeExceptionHeaders", 
type: { name: "Composite", @@ -69661,7 +70887,7 @@ const PageBlobResizeExceptionHeaders = { } } }; -const PageBlobUpdateSequenceNumberHeaders = { +var PageBlobUpdateSequenceNumberHeaders = { serializedName: "PageBlob_updateSequenceNumberHeaders", type: { name: "Composite", @@ -69726,7 +70952,7 @@ const PageBlobUpdateSequenceNumberHeaders = { } } }; -const PageBlobUpdateSequenceNumberExceptionHeaders = { +var PageBlobUpdateSequenceNumberExceptionHeaders = { serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", type: { name: "Composite", @@ -69742,7 +70968,7 @@ const PageBlobUpdateSequenceNumberExceptionHeaders = { } } }; -const PageBlobCopyIncrementalHeaders = { +var PageBlobCopyIncrementalHeaders = { serializedName: "PageBlob_copyIncrementalHeaders", type: { name: "Composite", @@ -69815,7 +71041,7 @@ const PageBlobCopyIncrementalHeaders = { } } }; -const PageBlobCopyIncrementalExceptionHeaders = { +var PageBlobCopyIncrementalExceptionHeaders = { serializedName: "PageBlob_copyIncrementalExceptionHeaders", type: { name: "Composite", @@ -69831,7 +71057,7 @@ const PageBlobCopyIncrementalExceptionHeaders = { } } }; -const AppendBlobCreateHeaders = { +var AppendBlobCreateHeaders = { serializedName: "AppendBlob_createHeaders", type: { name: "Composite", @@ -69924,7 +71150,7 @@ const AppendBlobCreateHeaders = { } } }; -const AppendBlobCreateExceptionHeaders = { +var AppendBlobCreateExceptionHeaders = { serializedName: "AppendBlob_createExceptionHeaders", type: { name: "Composite", @@ -69940,7 +71166,7 @@ const AppendBlobCreateExceptionHeaders = { } } }; -const AppendBlobAppendBlockHeaders = { +var AppendBlobAppendBlockHeaders = { serializedName: "AppendBlob_appendBlockHeaders", type: { name: "Composite", @@ -70047,7 +71273,7 @@ const AppendBlobAppendBlockHeaders = { } } }; -const AppendBlobAppendBlockExceptionHeaders = { +var AppendBlobAppendBlockExceptionHeaders = { serializedName: "AppendBlob_appendBlockExceptionHeaders", type: { name: "Composite", @@ -70063,7 +71289,7 @@ const AppendBlobAppendBlockExceptionHeaders = { } } }; -const AppendBlobAppendBlockFromUrlHeaders = { +var AppendBlobAppendBlockFromUrlHeaders = { serializedName: "AppendBlob_appendBlockFromUrlHeaders", type: { name: "Composite", @@ -70163,7 +71389,7 @@ const AppendBlobAppendBlockFromUrlHeaders = { } } }; -const AppendBlobAppendBlockFromUrlExceptionHeaders = { +var AppendBlobAppendBlockFromUrlExceptionHeaders = { serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", type: { name: "Composite", @@ -70179,7 +71405,7 @@ const AppendBlobAppendBlockFromUrlExceptionHeaders = { } } }; -const AppendBlobSealHeaders = { +var AppendBlobSealHeaders = { serializedName: "AppendBlob_sealHeaders", type: { name: "Composite", @@ -70237,7 +71463,7 @@ const AppendBlobSealHeaders = { } } }; -const AppendBlobSealExceptionHeaders = { +var AppendBlobSealExceptionHeaders = { serializedName: "AppendBlob_sealExceptionHeaders", type: { name: "Composite", @@ -70253,7 +71479,7 @@ const AppendBlobSealExceptionHeaders = { } } }; -const BlockBlobUploadHeaders = { +var BlockBlobUploadHeaders = { serializedName: "BlockBlob_uploadHeaders", type: { name: "Composite", @@ -70346,7 +71572,7 @@ const BlockBlobUploadHeaders = { } } }; -const BlockBlobUploadExceptionHeaders = { +var BlockBlobUploadExceptionHeaders = { serializedName: "BlockBlob_uploadExceptionHeaders", type: { name: "Composite", @@ -70362,7 +71588,7 @@ const BlockBlobUploadExceptionHeaders = { } } }; -const BlockBlobPutBlobFromUrlHeaders = { +var BlockBlobPutBlobFromUrlHeaders = { 
serializedName: "BlockBlob_putBlobFromUrlHeaders", type: { name: "Composite", @@ -70455,7 +71681,7 @@ const BlockBlobPutBlobFromUrlHeaders = { } } }; -const BlockBlobPutBlobFromUrlExceptionHeaders = { +var BlockBlobPutBlobFromUrlExceptionHeaders = { serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", type: { name: "Composite", @@ -70471,7 +71697,7 @@ const BlockBlobPutBlobFromUrlExceptionHeaders = { } } }; -const BlockBlobStageBlockHeaders = { +var BlockBlobStageBlockHeaders = { serializedName: "BlockBlob_stageBlockHeaders", type: { name: "Composite", @@ -70550,7 +71776,7 @@ const BlockBlobStageBlockHeaders = { } } }; -const BlockBlobStageBlockExceptionHeaders = { +var BlockBlobStageBlockExceptionHeaders = { serializedName: "BlockBlob_stageBlockExceptionHeaders", type: { name: "Composite", @@ -70566,7 +71792,7 @@ const BlockBlobStageBlockExceptionHeaders = { } } }; -const BlockBlobStageBlockFromURLHeaders = { +var BlockBlobStageBlockFromURLHeaders = { serializedName: "BlockBlob_stageBlockFromURLHeaders", type: { name: "Composite", @@ -70645,7 +71871,7 @@ const BlockBlobStageBlockFromURLHeaders = { } } }; -const BlockBlobStageBlockFromURLExceptionHeaders = { +var BlockBlobStageBlockFromURLExceptionHeaders = { serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", type: { name: "Composite", @@ -70661,7 +71887,7 @@ const BlockBlobStageBlockFromURLExceptionHeaders = { } } }; -const BlockBlobCommitBlockListHeaders = { +var BlockBlobCommitBlockListHeaders = { serializedName: "BlockBlob_commitBlockListHeaders", type: { name: "Composite", @@ -70761,7 +71987,7 @@ const BlockBlobCommitBlockListHeaders = { } } }; -const BlockBlobCommitBlockListExceptionHeaders = { +var BlockBlobCommitBlockListExceptionHeaders = { serializedName: "BlockBlob_commitBlockListExceptionHeaders", type: { name: "Composite", @@ -70777,7 +72003,7 @@ const BlockBlobCommitBlockListExceptionHeaders = { } } }; -const BlockBlobGetBlockListHeaders = { +var BlockBlobGetBlockListHeaders = { serializedName: "BlockBlob_getBlockListHeaders", type: { name: "Composite", @@ -70849,7 +72075,7 @@ const BlockBlobGetBlockListHeaders = { } } }; -const BlockBlobGetBlockListExceptionHeaders = { +var BlockBlobGetBlockListExceptionHeaders = { serializedName: "BlockBlob_getBlockListExceptionHeaders", type: { name: "Composite", @@ -71065,7 +72291,7 @@ var Mappers = /*#__PURE__*/Object.freeze({ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ -const contentType = { +var contentType = { parameterPath: ["options", "contentType"], mapper: { defaultValue: "application/xml", @@ -71076,11 +72302,11 @@ const contentType = { } } }; -const blobServiceProperties = { +var blobServiceProperties = { parameterPath: "blobServiceProperties", mapper: BlobServiceProperties }; -const accept = { +var accept = { parameterPath: "accept", mapper: { defaultValue: "application/xml", @@ -71091,7 +72317,7 @@ const accept = { } } }; -const url = { +var url = { parameterPath: "url", mapper: { serializedName: "url", @@ -71103,7 +72329,7 @@ const url = { }, skipEncoding: true }; -const restype = { +var restype = { parameterPath: "restype", mapper: { defaultValue: "service", @@ -71114,7 +72340,7 @@ const restype = { } } }; -const comp = { +var comp = { parameterPath: "comp", mapper: { defaultValue: "properties", @@ -71125,7 +72351,7 @@ const comp = { } } }; -const timeoutInSeconds = { +var timeoutInSeconds = { parameterPath: ["options", "timeoutInSeconds"], mapper: { constraints: { @@ -71138,7 +72364,7 @@ const timeoutInSeconds = { } } }; -const version = { +var version = { parameterPath: "version", mapper: { defaultValue: "2020-08-04", @@ -71149,7 +72375,7 @@ const version = { } } }; -const requestId = { +var requestId = { parameterPath: ["options", "requestId"], mapper: { serializedName: "x-ms-client-request-id", @@ -71159,7 +72385,7 @@ const requestId = { } } }; -const accept1 = { +var accept1 = { parameterPath: "accept", mapper: { defaultValue: "application/xml", @@ -71170,7 +72396,7 @@ const accept1 = { } } }; -const comp1 = { +var comp1 = { parameterPath: "comp", mapper: { defaultValue: "stats", @@ -71181,7 +72407,7 @@ const comp1 = { } } }; -const comp2 = { +var comp2 = { parameterPath: "comp", mapper: { defaultValue: "list", @@ -71192,7 +72418,7 @@ const comp2 = { } } }; -const prefix = { +var prefix = { parameterPath: ["options", "prefix"], mapper: { serializedName: "prefix", @@ -71202,7 +72428,7 @@ const prefix = { } } }; -const marker = { +var marker = { parameterPath: ["options", "marker"], mapper: { serializedName: "marker", @@ -71212,7 +72438,7 @@ const marker = { } } }; -const maxPageSize = { +var maxPageSize = { parameterPath: ["options", "maxPageSize"], mapper: { constraints: { @@ -71225,7 +72451,7 @@ const maxPageSize = { } } }; -const include = { +var include = { parameterPath: ["options", "include"], mapper: { serializedName: "include", @@ -71243,11 +72469,11 @@ const include = { }, collectionFormat: coreHttp.QueryCollectionFormat.Csv }; -const keyInfo = { +var keyInfo = { parameterPath: "keyInfo", mapper: KeyInfo }; -const comp3 = { +var comp3 = { parameterPath: "comp", mapper: { defaultValue: "userdelegationkey", @@ -71258,7 +72484,7 @@ const comp3 = { } } }; -const restype1 = { +var restype1 = { parameterPath: "restype", mapper: { defaultValue: "account", @@ -71269,7 +72495,7 @@ const restype1 = { } } }; -const body = { +var body = { parameterPath: "body", mapper: { serializedName: "body", @@ -71280,7 +72506,7 @@ const body = { } } }; -const comp4 = { +var comp4 = { parameterPath: "comp", mapper: { defaultValue: "batch", @@ -71291,7 +72517,7 @@ const comp4 = { } } }; -const contentLength = { +var contentLength = { parameterPath: "contentLength", mapper: { serializedName: "Content-Length", @@ -71302,7 +72528,7 @@ const contentLength = { } } }; -const multipartContentType = { +var multipartContentType = { parameterPath: "multipartContentType", mapper: { serializedName: "Content-Type", @@ -71313,7 +72539,7 @@ const multipartContentType 
= { } } }; -const comp5 = { +var comp5 = { parameterPath: "comp", mapper: { defaultValue: "blobs", @@ -71324,7 +72550,7 @@ const comp5 = { } } }; -const where = { +var where = { parameterPath: ["options", "where"], mapper: { serializedName: "where", @@ -71334,7 +72560,7 @@ const where = { } } }; -const restype2 = { +var restype2 = { parameterPath: "restype", mapper: { defaultValue: "container", @@ -71345,7 +72571,7 @@ const restype2 = { } } }; -const metadata = { +var metadata = { parameterPath: ["options", "metadata"], mapper: { serializedName: "x-ms-meta", @@ -71357,7 +72583,7 @@ const metadata = { headerCollectionPrefix: "x-ms-meta-" } }; -const access = { +var access = { parameterPath: ["options", "access"], mapper: { serializedName: "x-ms-blob-public-access", @@ -71368,7 +72594,7 @@ const access = { } } }; -const defaultEncryptionScope = { +var defaultEncryptionScope = { parameterPath: [ "options", "containerEncryptionScope", @@ -71382,7 +72608,7 @@ const defaultEncryptionScope = { } } }; -const preventEncryptionScopeOverride = { +var preventEncryptionScopeOverride = { parameterPath: [ "options", "containerEncryptionScope", @@ -71396,7 +72622,7 @@ const preventEncryptionScopeOverride = { } } }; -const leaseId = { +var leaseId = { parameterPath: ["options", "leaseAccessConditions", "leaseId"], mapper: { serializedName: "x-ms-lease-id", @@ -71406,7 +72632,7 @@ const leaseId = { } } }; -const ifModifiedSince = { +var ifModifiedSince = { parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], mapper: { serializedName: "If-Modified-Since", @@ -71416,7 +72642,7 @@ const ifModifiedSince = { } } }; -const ifUnmodifiedSince = { +var ifUnmodifiedSince = { parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], mapper: { serializedName: "If-Unmodified-Since", @@ -71426,7 +72652,7 @@ const ifUnmodifiedSince = { } } }; -const comp6 = { +var comp6 = { parameterPath: "comp", mapper: { defaultValue: "metadata", @@ -71437,7 +72663,7 @@ const comp6 = { } } }; -const comp7 = { +var comp7 = { parameterPath: "comp", mapper: { defaultValue: "acl", @@ -71448,7 +72674,7 @@ const comp7 = { } } }; -const containerAcl = { +var containerAcl = { parameterPath: ["options", "containerAcl"], mapper: { serializedName: "containerAcl", @@ -71466,7 +72692,7 @@ const containerAcl = { } } }; -const comp8 = { +var comp8 = { parameterPath: "comp", mapper: { defaultValue: "undelete", @@ -71477,7 +72703,7 @@ const comp8 = { } } }; -const deletedContainerName = { +var deletedContainerName = { parameterPath: ["options", "deletedContainerName"], mapper: { serializedName: "x-ms-deleted-container-name", @@ -71487,7 +72713,7 @@ const deletedContainerName = { } } }; -const deletedContainerVersion = { +var deletedContainerVersion = { parameterPath: ["options", "deletedContainerVersion"], mapper: { serializedName: "x-ms-deleted-container-version", @@ -71497,7 +72723,7 @@ const deletedContainerVersion = { } } }; -const comp9 = { +var comp9 = { parameterPath: "comp", mapper: { defaultValue: "rename", @@ -71508,7 +72734,7 @@ const comp9 = { } } }; -const sourceContainerName = { +var sourceContainerName = { parameterPath: "sourceContainerName", mapper: { serializedName: "x-ms-source-container-name", @@ -71519,7 +72745,7 @@ const sourceContainerName = { } } }; -const sourceLeaseId = { +var sourceLeaseId = { parameterPath: ["options", "sourceLeaseId"], mapper: { serializedName: "x-ms-source-lease-id", @@ -71529,7 +72755,7 @@ const sourceLeaseId = { } } }; -const comp10 = { +var comp10 = { 
parameterPath: "comp", mapper: { defaultValue: "lease", @@ -71540,7 +72766,7 @@ const comp10 = { } } }; -const action = { +var action = { parameterPath: "action", mapper: { defaultValue: "acquire", @@ -71551,7 +72777,7 @@ const action = { } } }; -const duration = { +var duration = { parameterPath: ["options", "duration"], mapper: { serializedName: "x-ms-lease-duration", @@ -71561,7 +72787,7 @@ const duration = { } } }; -const proposedLeaseId = { +var proposedLeaseId = { parameterPath: ["options", "proposedLeaseId"], mapper: { serializedName: "x-ms-proposed-lease-id", @@ -71571,7 +72797,7 @@ const proposedLeaseId = { } } }; -const action1 = { +var action1 = { parameterPath: "action", mapper: { defaultValue: "release", @@ -71582,7 +72808,7 @@ const action1 = { } } }; -const leaseId1 = { +var leaseId1 = { parameterPath: "leaseId", mapper: { serializedName: "x-ms-lease-id", @@ -71593,7 +72819,7 @@ const leaseId1 = { } } }; -const action2 = { +var action2 = { parameterPath: "action", mapper: { defaultValue: "renew", @@ -71604,7 +72830,7 @@ const action2 = { } } }; -const action3 = { +var action3 = { parameterPath: "action", mapper: { defaultValue: "break", @@ -71615,7 +72841,7 @@ const action3 = { } } }; -const breakPeriod = { +var breakPeriod = { parameterPath: ["options", "breakPeriod"], mapper: { serializedName: "x-ms-lease-break-period", @@ -71625,7 +72851,7 @@ const breakPeriod = { } } }; -const action4 = { +var action4 = { parameterPath: "action", mapper: { defaultValue: "change", @@ -71636,7 +72862,7 @@ const action4 = { } } }; -const proposedLeaseId1 = { +var proposedLeaseId1 = { parameterPath: "proposedLeaseId", mapper: { serializedName: "x-ms-proposed-lease-id", @@ -71647,7 +72873,7 @@ const proposedLeaseId1 = { } } }; -const include1 = { +var include1 = { parameterPath: ["options", "include"], mapper: { serializedName: "include", @@ -71673,7 +72899,7 @@ const include1 = { }, collectionFormat: coreHttp.QueryCollectionFormat.Csv }; -const delimiter = { +var delimiter = { parameterPath: "delimiter", mapper: { serializedName: "delimiter", @@ -71684,7 +72910,7 @@ const delimiter = { } } }; -const directoryProperties = { +var directoryProperties = { parameterPath: ["options", "directoryProperties"], mapper: { serializedName: "x-ms-properties", @@ -71694,7 +72920,7 @@ const directoryProperties = { } } }; -const posixPermissions = { +var posixPermissions = { parameterPath: ["options", "posixPermissions"], mapper: { serializedName: "x-ms-permissions", @@ -71704,7 +72930,7 @@ const posixPermissions = { } } }; -const posixUmask = { +var posixUmask = { parameterPath: ["options", "posixUmask"], mapper: { serializedName: "x-ms-umask", @@ -71714,7 +72940,7 @@ const posixUmask = { } } }; -const cacheControl = { +var cacheControl = { parameterPath: ["options", "directoryHttpHeaders", "cacheControl"], mapper: { serializedName: "x-ms-cache-control", @@ -71724,7 +72950,7 @@ const cacheControl = { } } }; -const contentType1 = { +var contentType1 = { parameterPath: ["options", "directoryHttpHeaders", "contentType"], mapper: { serializedName: "x-ms-content-type", @@ -71734,7 +72960,7 @@ const contentType1 = { } } }; -const contentEncoding = { +var contentEncoding = { parameterPath: ["options", "directoryHttpHeaders", "contentEncoding"], mapper: { serializedName: "x-ms-content-encoding", @@ -71744,7 +72970,7 @@ const contentEncoding = { } } }; -const contentLanguage = { +var contentLanguage = { parameterPath: ["options", "directoryHttpHeaders", "contentLanguage"], mapper: { serializedName: 
"x-ms-content-language", @@ -71754,7 +72980,7 @@ const contentLanguage = { } } }; -const contentDisposition = { +var contentDisposition = { parameterPath: ["options", "directoryHttpHeaders", "contentDisposition"], mapper: { serializedName: "x-ms-content-disposition", @@ -71764,7 +72990,7 @@ const contentDisposition = { } } }; -const ifMatch = { +var ifMatch = { parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], mapper: { serializedName: "If-Match", @@ -71774,7 +73000,7 @@ const ifMatch = { } } }; -const ifNoneMatch = { +var ifNoneMatch = { parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], mapper: { serializedName: "If-None-Match", @@ -71784,7 +73010,7 @@ const ifNoneMatch = { } } }; -const pathRenameMode = { +var pathRenameMode = { parameterPath: ["options", "pathRenameMode"], mapper: { serializedName: "mode", @@ -71795,7 +73021,7 @@ const pathRenameMode = { } } }; -const renameSource = { +var renameSource = { parameterPath: "renameSource", mapper: { serializedName: "x-ms-rename-source", @@ -71806,7 +73032,7 @@ const renameSource = { } } }; -const sourceIfModifiedSince = { +var sourceIfModifiedSince = { parameterPath: [ "options", "sourceModifiedAccessConditions", @@ -71820,7 +73046,7 @@ const sourceIfModifiedSince = { } } }; -const sourceIfUnmodifiedSince = { +var sourceIfUnmodifiedSince = { parameterPath: [ "options", "sourceModifiedAccessConditions", @@ -71834,7 +73060,7 @@ const sourceIfUnmodifiedSince = { } } }; -const sourceIfMatch = { +var sourceIfMatch = { parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], mapper: { serializedName: "x-ms-source-if-match", @@ -71844,7 +73070,7 @@ const sourceIfMatch = { } } }; -const sourceIfNoneMatch = { +var sourceIfNoneMatch = { parameterPath: [ "options", "sourceModifiedAccessConditions", @@ -71858,7 +73084,7 @@ const sourceIfNoneMatch = { } } }; -const action5 = { +var action5 = { parameterPath: "action", mapper: { defaultValue: "setAccessControl", @@ -71869,7 +73095,7 @@ const action5 = { } } }; -const owner = { +var owner = { parameterPath: ["options", "owner"], mapper: { serializedName: "x-ms-owner", @@ -71879,7 +73105,7 @@ const owner = { } } }; -const group = { +var group = { parameterPath: ["options", "group"], mapper: { serializedName: "x-ms-group", @@ -71889,7 +73115,7 @@ const group = { } } }; -const posixAcl = { +var posixAcl = { parameterPath: ["options", "posixAcl"], mapper: { serializedName: "x-ms-acl", @@ -71899,7 +73125,7 @@ const posixAcl = { } } }; -const action6 = { +var action6 = { parameterPath: "action", mapper: { defaultValue: "getAccessControl", @@ -71910,7 +73136,7 @@ const action6 = { } } }; -const upn = { +var upn = { parameterPath: ["options", "upn"], mapper: { serializedName: "upn", @@ -71920,7 +73146,7 @@ const upn = { } } }; -const snapshot = { +var snapshot = { parameterPath: ["options", "snapshot"], mapper: { serializedName: "snapshot", @@ -71930,7 +73156,7 @@ const snapshot = { } } }; -const versionId = { +var versionId = { parameterPath: ["options", "versionId"], mapper: { serializedName: "versionid", @@ -71940,7 +73166,7 @@ const versionId = { } } }; -const range = { +var range = { parameterPath: ["options", "range"], mapper: { serializedName: "x-ms-range", @@ -71950,7 +73176,7 @@ const range = { } } }; -const rangeGetContentMD5 = { +var rangeGetContentMD5 = { parameterPath: ["options", "rangeGetContentMD5"], mapper: { serializedName: "x-ms-range-get-content-md5", @@ -71960,7 +73186,7 @@ const rangeGetContentMD5 = { } } }; -const 
rangeGetContentCRC64 = { +var rangeGetContentCRC64 = { parameterPath: ["options", "rangeGetContentCRC64"], mapper: { serializedName: "x-ms-range-get-content-crc64", @@ -71970,7 +73196,7 @@ const rangeGetContentCRC64 = { } } }; -const encryptionKey = { +var encryptionKey = { parameterPath: ["options", "cpkInfo", "encryptionKey"], mapper: { serializedName: "x-ms-encryption-key", @@ -71980,7 +73206,7 @@ const encryptionKey = { } } }; -const encryptionKeySha256 = { +var encryptionKeySha256 = { parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], mapper: { serializedName: "x-ms-encryption-key-sha256", @@ -71990,7 +73216,7 @@ const encryptionKeySha256 = { } } }; -const encryptionAlgorithm = { +var encryptionAlgorithm = { parameterPath: ["options", "encryptionAlgorithm"], mapper: { defaultValue: "AES256", @@ -72001,7 +73227,7 @@ const encryptionAlgorithm = { } } }; -const ifTags = { +var ifTags = { parameterPath: ["options", "modifiedAccessConditions", "ifTags"], mapper: { serializedName: "x-ms-if-tags", @@ -72011,7 +73237,7 @@ const ifTags = { } } }; -const deleteSnapshots = { +var deleteSnapshots = { parameterPath: ["options", "deleteSnapshots"], mapper: { serializedName: "x-ms-delete-snapshots", @@ -72022,7 +73248,7 @@ const deleteSnapshots = { } } }; -const blobDeleteType = { +var blobDeleteType = { parameterPath: ["options", "blobDeleteType"], mapper: { serializedName: "deletetype", @@ -72032,7 +73258,7 @@ const blobDeleteType = { } } }; -const comp11 = { +var comp11 = { parameterPath: "comp", mapper: { defaultValue: "expiry", @@ -72043,7 +73269,7 @@ const comp11 = { } } }; -const expiryOptions = { +var expiryOptions = { parameterPath: "expiryOptions", mapper: { serializedName: "x-ms-expiry-option", @@ -72054,7 +73280,7 @@ const expiryOptions = { } } }; -const expiresOn = { +var expiresOn = { parameterPath: ["options", "expiresOn"], mapper: { serializedName: "x-ms-expiry-time", @@ -72064,7 +73290,7 @@ const expiresOn = { } } }; -const blobCacheControl = { +var blobCacheControl = { parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], mapper: { serializedName: "x-ms-blob-cache-control", @@ -72074,7 +73300,7 @@ const blobCacheControl = { } } }; -const blobContentType = { +var blobContentType = { parameterPath: ["options", "blobHttpHeaders", "blobContentType"], mapper: { serializedName: "x-ms-blob-content-type", @@ -72084,7 +73310,7 @@ const blobContentType = { } } }; -const blobContentMD5 = { +var blobContentMD5 = { parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], mapper: { serializedName: "x-ms-blob-content-md5", @@ -72094,7 +73320,7 @@ const blobContentMD5 = { } } }; -const blobContentEncoding = { +var blobContentEncoding = { parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], mapper: { serializedName: "x-ms-blob-content-encoding", @@ -72104,7 +73330,7 @@ const blobContentEncoding = { } } }; -const blobContentLanguage = { +var blobContentLanguage = { parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], mapper: { serializedName: "x-ms-blob-content-language", @@ -72114,7 +73340,7 @@ const blobContentLanguage = { } } }; -const blobContentDisposition = { +var blobContentDisposition = { parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], mapper: { serializedName: "x-ms-blob-content-disposition", @@ -72124,7 +73350,7 @@ const blobContentDisposition = { } } }; -const encryptionScope = { +var encryptionScope = { parameterPath: ["options", "encryptionScope"], mapper: { serializedName: 
"x-ms-encryption-scope", @@ -72134,7 +73360,7 @@ const encryptionScope = { } } }; -const comp12 = { +var comp12 = { parameterPath: "comp", mapper: { defaultValue: "snapshot", @@ -72145,7 +73371,7 @@ const comp12 = { } } }; -const tier = { +var tier = { parameterPath: ["options", "tier"], mapper: { serializedName: "x-ms-access-tier", @@ -72171,7 +73397,7 @@ const tier = { } } }; -const rehydratePriority = { +var rehydratePriority = { parameterPath: ["options", "rehydratePriority"], mapper: { serializedName: "x-ms-rehydrate-priority", @@ -72182,7 +73408,7 @@ const rehydratePriority = { } } }; -const sourceIfTags = { +var sourceIfTags = { parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], mapper: { serializedName: "x-ms-source-if-tags", @@ -72192,7 +73418,7 @@ const sourceIfTags = { } } }; -const copySource = { +var copySource = { parameterPath: "copySource", mapper: { serializedName: "x-ms-copy-source", @@ -72203,7 +73429,7 @@ const copySource = { } } }; -const blobTagsString = { +var blobTagsString = { parameterPath: ["options", "blobTagsString"], mapper: { serializedName: "x-ms-tags", @@ -72213,7 +73439,7 @@ const blobTagsString = { } } }; -const sealBlob = { +var sealBlob = { parameterPath: ["options", "sealBlob"], mapper: { serializedName: "x-ms-seal-blob", @@ -72223,7 +73449,7 @@ const sealBlob = { } } }; -const xMsRequiresSync = { +var xMsRequiresSync = { parameterPath: "xMsRequiresSync", mapper: { defaultValue: "true", @@ -72234,7 +73460,7 @@ const xMsRequiresSync = { } } }; -const sourceContentMD5 = { +var sourceContentMD5 = { parameterPath: ["options", "sourceContentMD5"], mapper: { serializedName: "x-ms-source-content-md5", @@ -72244,7 +73470,7 @@ const sourceContentMD5 = { } } }; -const comp13 = { +var comp13 = { parameterPath: "comp", mapper: { defaultValue: "copy", @@ -72255,7 +73481,7 @@ const comp13 = { } } }; -const copyActionAbortConstant = { +var copyActionAbortConstant = { parameterPath: "copyActionAbortConstant", mapper: { defaultValue: "abort", @@ -72266,7 +73492,7 @@ const copyActionAbortConstant = { } } }; -const copyId = { +var copyId = { parameterPath: "copyId", mapper: { serializedName: "copyid", @@ -72277,7 +73503,7 @@ const copyId = { } } }; -const comp14 = { +var comp14 = { parameterPath: "comp", mapper: { defaultValue: "tier", @@ -72288,7 +73514,7 @@ const comp14 = { } } }; -const tier1 = { +var tier1 = { parameterPath: "tier", mapper: { serializedName: "x-ms-access-tier", @@ -72315,11 +73541,11 @@ const tier1 = { } } }; -const queryRequest = { +var queryRequest = { parameterPath: ["options", "queryRequest"], mapper: QueryRequest }; -const comp15 = { +var comp15 = { parameterPath: "comp", mapper: { defaultValue: "query", @@ -72330,7 +73556,7 @@ const comp15 = { } } }; -const comp16 = { +var comp16 = { parameterPath: "comp", mapper: { defaultValue: "tags", @@ -72341,11 +73567,11 @@ const comp16 = { } } }; -const tags = { +var tags = { parameterPath: ["options", "tags"], mapper: BlobTags }; -const transactionalContentMD5 = { +var transactionalContentMD5 = { parameterPath: ["options", "transactionalContentMD5"], mapper: { serializedName: "Content-MD5", @@ -72355,7 +73581,7 @@ const transactionalContentMD5 = { } } }; -const transactionalContentCrc64 = { +var transactionalContentCrc64 = { parameterPath: ["options", "transactionalContentCrc64"], mapper: { serializedName: "x-ms-content-crc64", @@ -72365,7 +73591,7 @@ const transactionalContentCrc64 = { } } }; -const blobType = { +var blobType = { parameterPath: "blobType", mapper: { 
defaultValue: "PageBlob", @@ -72376,7 +73602,7 @@ const blobType = { } } }; -const blobContentLength = { +var blobContentLength = { parameterPath: "blobContentLength", mapper: { serializedName: "x-ms-blob-content-length", @@ -72387,7 +73613,7 @@ const blobContentLength = { } } }; -const blobSequenceNumber = { +var blobSequenceNumber = { parameterPath: ["options", "blobSequenceNumber"], mapper: { serializedName: "x-ms-blob-sequence-number", @@ -72397,7 +73623,7 @@ const blobSequenceNumber = { } } }; -const contentType2 = { +var contentType2 = { parameterPath: ["options", "contentType"], mapper: { defaultValue: "application/octet-stream", @@ -72408,7 +73634,7 @@ const contentType2 = { } } }; -const body1 = { +var body1 = { parameterPath: "body", mapper: { serializedName: "body", @@ -72419,7 +73645,7 @@ const body1 = { } } }; -const accept2 = { +var accept2 = { parameterPath: "accept", mapper: { defaultValue: "application/xml", @@ -72430,7 +73656,7 @@ const accept2 = { } } }; -const comp17 = { +var comp17 = { parameterPath: "comp", mapper: { defaultValue: "page", @@ -72441,7 +73667,7 @@ const comp17 = { } } }; -const pageWrite = { +var pageWrite = { parameterPath: "pageWrite", mapper: { defaultValue: "update", @@ -72452,7 +73678,7 @@ const pageWrite = { } } }; -const ifSequenceNumberLessThanOrEqualTo = { +var ifSequenceNumberLessThanOrEqualTo = { parameterPath: [ "options", "sequenceNumberAccessConditions", @@ -72466,7 +73692,7 @@ const ifSequenceNumberLessThanOrEqualTo = { } } }; -const ifSequenceNumberLessThan = { +var ifSequenceNumberLessThan = { parameterPath: [ "options", "sequenceNumberAccessConditions", @@ -72480,7 +73706,7 @@ const ifSequenceNumberLessThan = { } } }; -const ifSequenceNumberEqualTo = { +var ifSequenceNumberEqualTo = { parameterPath: [ "options", "sequenceNumberAccessConditions", @@ -72494,7 +73720,7 @@ const ifSequenceNumberEqualTo = { } } }; -const pageWrite1 = { +var pageWrite1 = { parameterPath: "pageWrite", mapper: { defaultValue: "clear", @@ -72505,7 +73731,7 @@ const pageWrite1 = { } } }; -const sourceUrl = { +var sourceUrl = { parameterPath: "sourceUrl", mapper: { serializedName: "x-ms-copy-source", @@ -72516,7 +73742,7 @@ const sourceUrl = { } } }; -const sourceRange = { +var sourceRange = { parameterPath: "sourceRange", mapper: { serializedName: "x-ms-source-range", @@ -72527,7 +73753,7 @@ const sourceRange = { } } }; -const sourceContentCrc64 = { +var sourceContentCrc64 = { parameterPath: ["options", "sourceContentCrc64"], mapper: { serializedName: "x-ms-source-content-crc64", @@ -72537,7 +73763,7 @@ const sourceContentCrc64 = { } } }; -const range1 = { +var range1 = { parameterPath: "range", mapper: { serializedName: "x-ms-range", @@ -72548,7 +73774,7 @@ const range1 = { } } }; -const comp18 = { +var comp18 = { parameterPath: "comp", mapper: { defaultValue: "pagelist", @@ -72559,7 +73785,7 @@ const comp18 = { } } }; -const prevsnapshot = { +var prevsnapshot = { parameterPath: ["options", "prevsnapshot"], mapper: { serializedName: "prevsnapshot", @@ -72569,7 +73795,7 @@ const prevsnapshot = { } } }; -const prevSnapshotUrl = { +var prevSnapshotUrl = { parameterPath: ["options", "prevSnapshotUrl"], mapper: { serializedName: "x-ms-previous-snapshot-url", @@ -72579,7 +73805,7 @@ const prevSnapshotUrl = { } } }; -const sequenceNumberAction = { +var sequenceNumberAction = { parameterPath: "sequenceNumberAction", mapper: { serializedName: "x-ms-sequence-number-action", @@ -72591,7 +73817,7 @@ const sequenceNumberAction = { } } }; -const comp19 = { +var comp19 = { 
parameterPath: "comp", mapper: { defaultValue: "incrementalcopy", @@ -72602,7 +73828,7 @@ const comp19 = { } } }; -const blobType1 = { +var blobType1 = { parameterPath: "blobType", mapper: { defaultValue: "AppendBlob", @@ -72613,7 +73839,7 @@ const blobType1 = { } } }; -const comp20 = { +var comp20 = { parameterPath: "comp", mapper: { defaultValue: "appendblock", @@ -72624,7 +73850,7 @@ const comp20 = { } } }; -const maxSize = { +var maxSize = { parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], mapper: { serializedName: "x-ms-blob-condition-maxsize", @@ -72634,7 +73860,7 @@ const maxSize = { } } }; -const appendPosition = { +var appendPosition = { parameterPath: [ "options", "appendPositionAccessConditions", @@ -72648,7 +73874,7 @@ const appendPosition = { } } }; -const sourceRange1 = { +var sourceRange1 = { parameterPath: ["options", "sourceRange"], mapper: { serializedName: "x-ms-source-range", @@ -72658,7 +73884,7 @@ const sourceRange1 = { } } }; -const comp21 = { +var comp21 = { parameterPath: "comp", mapper: { defaultValue: "seal", @@ -72669,7 +73895,7 @@ const comp21 = { } } }; -const blobType2 = { +var blobType2 = { parameterPath: "blobType", mapper: { defaultValue: "BlockBlob", @@ -72680,7 +73906,7 @@ const blobType2 = { } } }; -const copySourceBlobProperties = { +var copySourceBlobProperties = { parameterPath: ["options", "copySourceBlobProperties"], mapper: { serializedName: "x-ms-copy-source-blob-properties", @@ -72690,7 +73916,7 @@ const copySourceBlobProperties = { } } }; -const comp22 = { +var comp22 = { parameterPath: "comp", mapper: { defaultValue: "block", @@ -72701,7 +73927,7 @@ const comp22 = { } } }; -const blockId = { +var blockId = { parameterPath: "blockId", mapper: { serializedName: "blockid", @@ -72712,11 +73938,11 @@ const blockId = { } } }; -const blocks = { +var blocks = { parameterPath: "blocks", mapper: BlockLookupList }; -const comp23 = { +var comp23 = { parameterPath: "comp", mapper: { defaultValue: "blocklist", @@ -72727,7 +73953,7 @@ const comp23 = { } } }; -const listType = { +var listType = { parameterPath: "listType", mapper: { defaultValue: "committed", @@ -72749,12 +73975,12 @@ const listType = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class representing a Service. */ -class Service { +var Service = /** @class */ (function () { /** * Initialize a new instance of the class Service class. * @param client Reference to the service client */ - constructor(client) { + function Service(client) { this.client = client; } /** @@ -72763,69 +73989,69 @@ class Service { * @param blobServiceProperties The StorageService properties. * @param options The options parameters. */ - setProperties(blobServiceProperties, options) { - const operationArguments = { - blobServiceProperties, + Service.prototype.setProperties = function (blobServiceProperties, options) { + var operationArguments = { + blobServiceProperties: blobServiceProperties, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); - } + }; /** * gets the properties of a storage account's Blob service, including properties for Storage Analytics * and CORS (Cross-Origin Resource Sharing) rules. * @param options The options parameters. 
*/ - getProperties(options) { - const operationArguments = { + Service.prototype.getProperties = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); - } + }; /** * Retrieves statistics related to replication for the Blob service. It is only available on the * secondary location endpoint when read-access geo-redundant replication is enabled for the storage * account. * @param options The options parameters. */ - getStatistics(options) { - const operationArguments = { + Service.prototype.getStatistics = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); - } + }; /** * The List Containers Segment operation returns a list of the containers under the specified account * @param options The options parameters. */ - listContainersSegment(options) { - const operationArguments = { + Service.prototype.listContainersSegment = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); - } + }; /** * Retrieves a user delegation key for the Blob service. This is only a valid operation when using * bearer token authentication. * @param keyInfo Key information * @param options The options parameters. */ - getUserDelegationKey(keyInfo, options) { - const operationArguments = { - keyInfo, + Service.prototype.getUserDelegationKey = function (keyInfo, options) { + var operationArguments = { + keyInfo: keyInfo, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); - } + }; /** * Returns the sku name and account kind * @param options The options parameters. */ - getAccountInfo(options) { - const operationArguments = { + Service.prototype.getAccountInfo = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); - } + }; /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. * @param contentLength The length of the request. @@ -72834,31 +74060,32 @@ class Service { * @param body Initial data * @param options The options parameters. */ - submitBatch(contentLength, multipartContentType, body, options) { - const operationArguments = { - contentLength, - multipartContentType, - body, + Service.prototype.submitBatch = function (contentLength, multipartContentType, body, options) { + var operationArguments = { + contentLength: contentLength, + multipartContentType: multipartContentType, + body: body, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); - } + }; /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a * given search expression. Filter blobs searches across all containers within a storage account but * can be scoped within the expression to a single container. * @param options The options parameters. 
*/ - filterBlobs(options) { - const operationArguments = { + Service.prototype.filterBlobs = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); - } -} + }; + return Service; +}()); // Operation Specifications -const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); -const setPropertiesOperationSpec = { +var xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +var setPropertiesOperationSpec = { path: "/", httpMethod: "PUT", responses: { @@ -72888,7 +74115,7 @@ const setPropertiesOperationSpec = { mediaType: "xml", serializer: xmlSerializer }; -const getPropertiesOperationSpec = { +var getPropertiesOperationSpec = { path: "/", httpMethod: "GET", responses: { @@ -72915,7 +74142,7 @@ const getPropertiesOperationSpec = { isXML: true, serializer: xmlSerializer }; -const getStatisticsOperationSpec = { +var getStatisticsOperationSpec = { path: "/", httpMethod: "GET", responses: { @@ -72942,7 +74169,7 @@ const getStatisticsOperationSpec = { isXML: true, serializer: xmlSerializer }; -const listContainersSegmentOperationSpec = { +var listContainersSegmentOperationSpec = { path: "/", httpMethod: "GET", responses: { @@ -72972,7 +74199,7 @@ const listContainersSegmentOperationSpec = { isXML: true, serializer: xmlSerializer }; -const getUserDelegationKeyOperationSpec = { +var getUserDelegationKeyOperationSpec = { path: "/", httpMethod: "POST", responses: { @@ -73003,7 +74230,7 @@ const getUserDelegationKeyOperationSpec = { mediaType: "xml", serializer: xmlSerializer }; -const getAccountInfoOperationSpec = { +var getAccountInfoOperationSpec = { path: "/", httpMethod: "GET", responses: { @@ -73021,7 +74248,7 @@ const getAccountInfoOperationSpec = { isXML: true, serializer: xmlSerializer }; -const submitBatchOperationSpec = { +var submitBatchOperationSpec = { path: "/", httpMethod: "POST", responses: { @@ -73053,7 +74280,7 @@ const submitBatchOperationSpec = { mediaType: "xml", serializer: xmlSerializer }; -const filterBlobsOperationSpec = { +var filterBlobsOperationSpec = { path: "/", httpMethod: "GET", responses: { @@ -73091,12 +74318,12 @@ const filterBlobsOperationSpec = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class representing a Container. */ -class Container { +var Container = /** @class */ (function () { /** * Initialize a new instance of the class Container class. * @param client Reference to the service client */ - constructor(client) { + function Container(client) { this.client = client; } /** @@ -73104,88 +74331,88 @@ class Container { * exists, the operation fails * @param options The options parameters. */ - create(options) { - const operationArguments = { + Container.prototype.create = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createOperationSpec); - } + }; /** * returns all user-defined metadata and system properties for the specified container. The data * returned does not include the container's list of blobs * @param options The options parameters. 
*/ - getProperties(options) { - const operationArguments = { + Container.prototype.getProperties = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); - } + }; /** * operation marks the specified container for deletion. The container and any blobs contained within * it are later deleted during garbage collection * @param options The options parameters. */ - delete(options) { - const operationArguments = { + Container.prototype.delete = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); - } + }; /** * operation sets one or more user-defined name-value pairs for the specified container. * @param options The options parameters. */ - setMetadata(options) { - const operationArguments = { + Container.prototype.setMetadata = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); - } + }; /** * gets the permissions for the specified container. The permissions indicate whether container data * may be accessed publicly. * @param options The options parameters. */ - getAccessPolicy(options) { - const operationArguments = { + Container.prototype.getAccessPolicy = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); - } + }; /** * sets the permissions for the specified container. The permissions indicate whether blobs in a * container may be accessed publicly. * @param options The options parameters. */ - setAccessPolicy(options) { - const operationArguments = { + Container.prototype.setAccessPolicy = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); - } + }; /** * Restores a previously-deleted container. * @param options The options parameters. */ - restore(options) { - const operationArguments = { + Container.prototype.restore = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); - } + }; /** * Renames an existing container. * @param sourceContainerName Required. Specifies the name of the container to rename. * @param options The options parameters. */ - rename(sourceContainerName, options) { - const operationArguments = { - sourceContainerName, + Container.prototype.rename = function (sourceContainerName, options) { + var operationArguments = { + sourceContainerName: sourceContainerName, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, renameOperationSpec); - } + }; /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. * @param contentLength The length of the request. @@ -73194,63 +74421,63 @@ class Container { * @param body Initial data * @param options The options parameters. 
*/ - submitBatch(contentLength, multipartContentType, body, options) { - const operationArguments = { - contentLength, - multipartContentType, - body, + Container.prototype.submitBatch = function (contentLength, multipartContentType, body, options) { + var operationArguments = { + contentLength: contentLength, + multipartContentType: multipartContentType, + body: body, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); - } + }; /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param options The options parameters. */ - acquireLease(options) { - const operationArguments = { + Container.prototype.acquireLease = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); - } + }; /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ - releaseLease(leaseId, options) { - const operationArguments = { - leaseId, + Container.prototype.releaseLease = function (leaseId, options) { + var operationArguments = { + leaseId: leaseId, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); - } + }; /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ - renewLease(leaseId, options) { - const operationArguments = { - leaseId, + Container.prototype.renewLease = function (leaseId, options) { + var operationArguments = { + leaseId: leaseId, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); - } + }; /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param options The options parameters. */ - breakLease(options) { - const operationArguments = { + Container.prototype.breakLease = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); - } + }; /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite @@ -73260,24 +74487,24 @@ class Container { * (String) for a list of valid GUID string formats. * @param options The options parameters. 
*/ - changeLease(leaseId, proposedLeaseId, options) { - const operationArguments = { - leaseId, - proposedLeaseId, + Container.prototype.changeLease = function (leaseId, proposedLeaseId, options) { + var operationArguments = { + leaseId: leaseId, + proposedLeaseId: proposedLeaseId, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); - } + }; /** * [Update] The List Blobs operation returns a list of the blobs under the specified container * @param options The options parameters. */ - listBlobFlatSegment(options) { - const operationArguments = { + Container.prototype.listBlobFlatSegment = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); - } + }; /** * [Update] The List Blobs operation returns a list of the blobs under the specified container * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix @@ -73286,27 +74513,28 @@ class Container { * character or a string. * @param options The options parameters. */ - listBlobHierarchySegment(delimiter, options) { - const operationArguments = { - delimiter, + Container.prototype.listBlobHierarchySegment = function (delimiter, options) { + var operationArguments = { + delimiter: delimiter, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); - } + }; /** * Returns the sku name and account kind * @param options The options parameters. */ - getAccountInfo(options) { - const operationArguments = { + Container.prototype.getAccountInfo = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); - } -} + }; + return Container; +}()); // Operation Specifications -const xmlSerializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const createOperationSpec = { +var xmlSerializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ true); +var createOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -73332,7 +74560,7 @@ const createOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const getPropertiesOperationSpec$1 = { +var getPropertiesOperationSpec$1 = { path: "/{containerName}", httpMethod: "GET", responses: { @@ -73355,7 +74583,7 @@ const getPropertiesOperationSpec$1 = { isXML: true, serializer: xmlSerializer$1 }; -const deleteOperationSpec = { +var deleteOperationSpec = { path: "/{containerName}", httpMethod: "DELETE", responses: { @@ -73380,7 +74608,7 @@ const deleteOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const setMetadataOperationSpec = { +var setMetadataOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -73409,7 +74637,7 @@ const setMetadataOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const getAccessPolicyOperationSpec = { +var getAccessPolicyOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { @@ -73448,7 +74676,7 @@ const getAccessPolicyOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const setAccessPolicyOperationSpec = { +var setAccessPolicyOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 
@@ -73482,7 +74710,7 @@ const setAccessPolicyOperationSpec = { mediaType: "xml", serializer: xmlSerializer$1 }; -const restoreOperationSpec = { +var restoreOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -73510,7 +74738,7 @@ const restoreOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const renameOperationSpec = { +var renameOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -73538,7 +74766,7 @@ const renameOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const submitBatchOperationSpec$1 = { +var submitBatchOperationSpec$1 = { path: "/{containerName}", httpMethod: "POST", responses: { @@ -73574,7 +74802,7 @@ const submitBatchOperationSpec$1 = { mediaType: "xml", serializer: xmlSerializer$1 }; -const acquireLeaseOperationSpec = { +var acquireLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -73605,7 +74833,7 @@ const acquireLeaseOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const releaseLeaseOperationSpec = { +var releaseLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -73635,7 +74863,7 @@ const releaseLeaseOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const renewLeaseOperationSpec = { +var renewLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -73665,7 +74893,7 @@ const renewLeaseOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const breakLeaseOperationSpec = { +var breakLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -73695,7 +74923,7 @@ const breakLeaseOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const changeLeaseOperationSpec = { +var changeLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -73726,7 +74954,7 @@ const changeLeaseOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const listBlobFlatSegmentOperationSpec = { +var listBlobFlatSegmentOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { @@ -73757,7 +74985,7 @@ const listBlobFlatSegmentOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const listBlobHierarchySegmentOperationSpec = { +var listBlobHierarchySegmentOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { @@ -73789,7 +75017,7 @@ const listBlobHierarchySegmentOperationSpec = { isXML: true, serializer: xmlSerializer$1 }; -const getAccountInfoOperationSpec$1 = { +var getAccountInfoOperationSpec$1 = { path: "/{containerName}", httpMethod: "GET", responses: { @@ -73816,12 +75044,12 @@ const getAccountInfoOperationSpec$1 = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class representing a Blob. */ -class Blob$1 { +var Blob$1 = /** @class */ (function () { /** * Initialize a new instance of the class Blob class. * @param client Reference to the service client */ - constructor(client) { + function Blob(client) { this.client = client; } /** @@ -73829,23 +75057,23 @@ class Blob$1 { * properties. You can also call Download to read a snapshot. * @param options The options parameters. 
*/ - download(options) { - const operationArguments = { + Blob.prototype.download = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); - } + }; /** * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system * properties for the blob. It does not return the content of the blob. * @param options The options parameters. */ - getProperties(options) { - const operationArguments = { + Blob.prototype.getProperties = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); - } + }; /** * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is * permanently removed from the storage account. If the storage account's soft delete feature is @@ -73861,32 +75089,32 @@ class Blob$1 { * (ResourceNotFound). * @param options The options parameters. */ - delete(options) { - const operationArguments = { + Blob.prototype.delete = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); - } + }; /** * Set the owner, group, permissions, or access control list for a blob. * @param options The options parameters. */ - setAccessControl(options) { - const operationArguments = { + Blob.prototype.setAccessControl = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setAccessControlOperationSpec); - } + }; /** * Get the owner, group, permissions, or access control list for a blob. * @param options The options parameters. */ - getAccessControl(options) { - const operationArguments = { + Blob.prototype.getAccessControl = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccessControlOperationSpec); - } + }; /** * Rename a blob/file. By default, the destination is overwritten and if the destination already * exists and has a lease the lease is broken. This operation supports conditional HTTP requests. For @@ -73898,93 +75126,93 @@ class Blob$1 { * existing properties; otherwise, the existing properties will be preserved. * @param options The options parameters. */ - rename(renameSource, options) { - const operationArguments = { - renameSource, + Blob.prototype.rename = function (renameSource, options) { + var operationArguments = { + renameSource: renameSource, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, renameOperationSpec$1); - } + }; /** * Undelete a blob that was previously soft deleted * @param options The options parameters. */ - undelete(options) { - const operationArguments = { + Blob.prototype.undelete = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); - } + }; /** * Sets the time a blob will expire and be deleted. * @param expiryOptions Required. 
Indicates mode of the expiry time * @param options The options parameters. */ - setExpiry(expiryOptions, options) { - const operationArguments = { - expiryOptions, + Blob.prototype.setExpiry = function (expiryOptions, options) { + var operationArguments = { + expiryOptions: expiryOptions, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); - } + }; /** * The Set HTTP Headers operation sets system properties on the blob * @param options The options parameters. */ - setHttpHeaders(options) { - const operationArguments = { + Blob.prototype.setHttpHeaders = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); - } + }; /** * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more * name-value pairs * @param options The options parameters. */ - setMetadata(options) { - const operationArguments = { + Blob.prototype.setMetadata = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); - } + }; /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param options The options parameters. */ - acquireLease(options) { - const operationArguments = { + Blob.prototype.acquireLease = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); - } + }; /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ - releaseLease(leaseId, options) { - const operationArguments = { - leaseId, + Blob.prototype.releaseLease = function (leaseId, options) { + var operationArguments = { + leaseId: leaseId, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); - } + }; /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ - renewLease(leaseId, options) { - const operationArguments = { - leaseId, + Blob.prototype.renewLease = function (leaseId, options) { + var operationArguments = { + leaseId: leaseId, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); - } + }; /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations @@ -73994,35 +75222,35 @@ class Blob$1 { * (String) for a list of valid GUID string formats. * @param options The options parameters. 
*/ - changeLease(leaseId, proposedLeaseId, options) { - const operationArguments = { - leaseId, - proposedLeaseId, + Blob.prototype.changeLease = function (leaseId, proposedLeaseId, options) { + var operationArguments = { + leaseId: leaseId, + proposedLeaseId: proposedLeaseId, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); - } + }; /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param options The options parameters. */ - breakLease(options) { - const operationArguments = { + Blob.prototype.breakLease = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); - } + }; /** * The Create Snapshot operation creates a read-only snapshot of a blob * @param options The options parameters. */ - createSnapshot(options) { - const operationArguments = { + Blob.prototype.createSnapshot = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); - } + }; /** * The Start Copy From URL operation copies a blob or an internet resource to a new blob. * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to @@ -74031,13 +75259,13 @@ class Blob$1 { * access signature. * @param options The options parameters. */ - startCopyFromURL(copySource, options) { - const operationArguments = { - copySource, + Blob.prototype.startCopyFromURL = function (copySource, options) { + var operationArguments = { + copySource: copySource, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); - } + }; /** * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return * a response until the copy is complete. @@ -74047,13 +75275,13 @@ class Blob$1 { * access signature. * @param options The options parameters. */ - copyFromURL(copySource, options) { - const operationArguments = { - copySource, + Blob.prototype.copyFromURL = function (copySource, options) { + var operationArguments = { + copySource: copySource, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); - } + }; /** * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination * blob with zero length and full metadata. @@ -74061,13 +75289,13 @@ class Blob$1 { * operation. * @param options The options parameters. */ - abortCopyFromURL(copyId, options) { - const operationArguments = { - copyId, + Blob.prototype.abortCopyFromURL = function (copyId, options) { + var operationArguments = { + copyId: copyId, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); - } + }; /** * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium * storage account and on a block blob in a blob storage account (locally redundant storage only). 
A @@ -74077,58 +75305,59 @@ class Blob$1 { * @param tier Indicates the tier to be set on the blob. * @param options The options parameters. */ - setTier(tier, options) { - const operationArguments = { - tier, + Blob.prototype.setTier = function (tier, options) { + var operationArguments = { + tier: tier, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); - } + }; /** * Returns the sku name and account kind * @param options The options parameters. */ - getAccountInfo(options) { - const operationArguments = { + Blob.prototype.getAccountInfo = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); - } + }; /** * The Query operation enables users to select/project on blob data by providing simple query * expressions. * @param options The options parameters. */ - query(options) { - const operationArguments = { + Blob.prototype.query = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, queryOperationSpec); - } + }; /** * The Get Tags operation enables users to get the tags associated with a blob. * @param options The options parameters. */ - getTags(options) { - const operationArguments = { + Blob.prototype.getTags = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); - } + }; /** * The Set Tags operation enables users to set tags on a blob. * @param options The options parameters. 
*/ - setTags(options) { - const operationArguments = { + Blob.prototype.setTags = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); - } -} + }; + return Blob; +}()); // Operation Specifications -const xmlSerializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const downloadOperationSpec = { +var xmlSerializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ true); +var downloadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -74177,7 +75406,7 @@ const downloadOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const getPropertiesOperationSpec$2 = { +var getPropertiesOperationSpec$2 = { path: "/{containerName}/{blob}", httpMethod: "HEAD", responses: { @@ -74212,7 +75441,7 @@ const getPropertiesOperationSpec$2 = { isXML: true, serializer: xmlSerializer$2 }; -const deleteOperationSpec$1 = { +var deleteOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { @@ -74246,7 +75475,7 @@ const deleteOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const setAccessControlOperationSpec = { +var setAccessControlOperationSpec = { path: "/{filesystem}/{path}", httpMethod: "PATCH", responses: { @@ -74277,7 +75506,7 @@ const setAccessControlOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const getAccessControlOperationSpec = { +var getAccessControlOperationSpec = { path: "/{filesystem}/{path}", httpMethod: "HEAD", responses: { @@ -74308,7 +75537,7 @@ const getAccessControlOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const renameOperationSpec$1 = { +var renameOperationSpec$1 = { path: "/{filesystem}/{path}", httpMethod: "PUT", responses: { @@ -74349,7 +75578,7 @@ const renameOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const undeleteOperationSpec = { +var undeleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74371,7 +75600,7 @@ const undeleteOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const setExpiryOperationSpec = { +var setExpiryOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74395,7 +75624,7 @@ const setExpiryOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const setHttpHeadersOperationSpec = { +var setHttpHeadersOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74429,7 +75658,7 @@ const setHttpHeadersOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const setMetadataOperationSpec$1 = { +var setMetadataOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74462,7 +75691,7 @@ const setMetadataOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const acquireLeaseOperationSpec$1 = { +var acquireLeaseOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74492,7 +75721,7 @@ const acquireLeaseOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const releaseLeaseOperationSpec$1 = { +var releaseLeaseOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74521,7 +75750,7 @@ const releaseLeaseOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const renewLeaseOperationSpec$1 = { +var renewLeaseOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74550,7 +75779,7 @@ 
const renewLeaseOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const changeLeaseOperationSpec$1 = { +var changeLeaseOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74580,7 +75809,7 @@ const changeLeaseOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const breakLeaseOperationSpec$1 = { +var breakLeaseOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74609,7 +75838,7 @@ const breakLeaseOperationSpec$1 = { isXML: true, serializer: xmlSerializer$2 }; -const createSnapshotOperationSpec = { +var createSnapshotOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74642,7 +75871,7 @@ const createSnapshotOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const startCopyFromURLOperationSpec = { +var startCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74681,7 +75910,7 @@ const startCopyFromURLOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const copyFromURLOperationSpec = { +var copyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74719,7 +75948,7 @@ const copyFromURLOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const abortCopyFromURLOperationSpec = { +var abortCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74747,7 +75976,7 @@ const abortCopyFromURLOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const setTierOperationSpec = { +var setTierOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74781,7 +76010,7 @@ const setTierOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const getAccountInfoOperationSpec$2 = { +var getAccountInfoOperationSpec$2 = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -74799,7 +76028,7 @@ const getAccountInfoOperationSpec$2 = { isXML: true, serializer: xmlSerializer$2 }; -const queryOperationSpec = { +var queryOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "POST", responses: { @@ -74849,7 +76078,7 @@ const queryOperationSpec = { mediaType: "xml", serializer: xmlSerializer$2 }; -const getTagsOperationSpec = { +var getTagsOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -74879,7 +76108,7 @@ const getTagsOperationSpec = { isXML: true, serializer: xmlSerializer$2 }; -const setTagsOperationSpec = { +var setTagsOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -74922,12 +76151,12 @@ const setTagsOperationSpec = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class representing a PageBlob. */ -class PageBlob { +var PageBlob = /** @class */ (function () { /** * Initialize a new instance of the class PageBlob class. * @param client Reference to the service client */ - constructor(client) { + function PageBlob(client) { this.client = client; } /** @@ -74937,40 +76166,40 @@ class PageBlob { * page blob size must be aligned to a 512-byte boundary. * @param options The options parameters. 
*/ - create(contentLength, blobContentLength, options) { - const operationArguments = { - contentLength, - blobContentLength, + PageBlob.prototype.create = function (contentLength, blobContentLength, options) { + var operationArguments = { + contentLength: contentLength, + blobContentLength: blobContentLength, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); - } + }; /** * The Upload Pages operation writes a range of pages to a page blob * @param contentLength The length of the request. * @param body Initial data * @param options The options parameters. */ - uploadPages(contentLength, body, options) { - const operationArguments = { - contentLength, - body, + PageBlob.prototype.uploadPages = function (contentLength, body, options) { + var operationArguments = { + contentLength: contentLength, + body: body, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); - } + }; /** * The Clear Pages operation clears a set of pages from a page blob * @param contentLength The length of the request. * @param options The options parameters. */ - clearPages(contentLength, options) { - const operationArguments = { - contentLength, + PageBlob.prototype.clearPages = function (contentLength, options) { + var operationArguments = { + contentLength: contentLength, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); - } + }; /** * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a * URL @@ -74982,51 +76211,51 @@ class PageBlob { * aligned and range-end is required. * @param options The options parameters. */ - uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { - const operationArguments = { - sourceUrl, - sourceRange, - contentLength, - range, + PageBlob.prototype.uploadPagesFromURL = function (sourceUrl, sourceRange, contentLength, range, options) { + var operationArguments = { + sourceUrl: sourceUrl, + sourceRange: sourceRange, + contentLength: contentLength, + range: range, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); - } + }; /** * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a * page blob * @param options The options parameters. */ - getPageRanges(options) { - const operationArguments = { + PageBlob.prototype.getPageRanges = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); - } + }; /** * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were * changed between target blob and previous snapshot. * @param options The options parameters. 
*/ - getPageRangesDiff(options) { - const operationArguments = { + PageBlob.prototype.getPageRangesDiff = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); - } + }; /** * Resize the Blob * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The * page blob size must be aligned to a 512-byte boundary. * @param options The options parameters. */ - resize(blobContentLength, options) { - const operationArguments = { - blobContentLength, + PageBlob.prototype.resize = function (blobContentLength, options) { + var operationArguments = { + blobContentLength: blobContentLength, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); - } + }; /** * Update the sequence number of the blob * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. @@ -75034,13 +76263,13 @@ class PageBlob { * blob's sequence number * @param options The options parameters. */ - updateSequenceNumber(sequenceNumberAction, options) { - const operationArguments = { - sequenceNumberAction, + PageBlob.prototype.updateSequenceNumber = function (sequenceNumberAction, options) { + var operationArguments = { + sequenceNumberAction: sequenceNumberAction, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); - } + }; /** * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. * The snapshot is copied such that only the differential changes between the previously copied @@ -75053,18 +76282,19 @@ class PageBlob { * access signature. * @param options The options parameters. 
*/ - copyIncremental(copySource, options) { - const operationArguments = { - copySource, + PageBlob.prototype.copyIncremental = function (copySource, options) { + var operationArguments = { + copySource: copySource, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); - } -} + }; + return PageBlob; +}()); // Operation Specifications -const xmlSerializer$3 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); -const createOperationSpec$1 = { +var xmlSerializer$3 = new coreHttp.Serializer(Mappers, /* isXml */ true); +var serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +var createOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75109,7 +76339,7 @@ const createOperationSpec$1 = { isXML: true, serializer: xmlSerializer$3 }; -const uploadPagesOperationSpec = { +var uploadPagesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75149,9 +76379,9 @@ const uploadPagesOperationSpec = { ifSequenceNumberEqualTo ], mediaType: "binary", - serializer + serializer: serializer }; -const clearPagesOperationSpec = { +var clearPagesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75189,7 +76419,7 @@ const clearPagesOperationSpec = { isXML: true, serializer: xmlSerializer$3 }; -const uploadPagesFromURLOperationSpec = { +var uploadPagesFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75235,7 +76465,7 @@ const uploadPagesFromURLOperationSpec = { isXML: true, serializer: xmlSerializer$3 }; -const getPageRangesOperationSpec = { +var getPageRangesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -75269,7 +76499,7 @@ const getPageRangesOperationSpec = { isXML: true, serializer: xmlSerializer$3 }; -const getPageRangesDiffOperationSpec = { +var getPageRangesDiffOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -75305,7 +76535,7 @@ const getPageRangesDiffOperationSpec = { isXML: true, serializer: xmlSerializer$3 }; -const resizeOperationSpec = { +var resizeOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75338,7 +76568,7 @@ const resizeOperationSpec = { isXML: true, serializer: xmlSerializer$3 }; -const updateSequenceNumberOperationSpec = { +var updateSequenceNumberOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75368,7 +76598,7 @@ const updateSequenceNumberOperationSpec = { isXML: true, serializer: xmlSerializer$3 }; -const copyIncrementalOperationSpec = { +var copyIncrementalOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75405,12 +76635,12 @@ const copyIncrementalOperationSpec = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class representing a AppendBlob. */ -class AppendBlob { +var AppendBlob = /** @class */ (function () { /** * Initialize a new instance of the class AppendBlob class. * @param client Reference to the service client */ - constructor(client) { + function AppendBlob(client) { this.client = client; } /** @@ -75418,13 +76648,13 @@ class AppendBlob { * @param contentLength The length of the request. * @param options The options parameters. 
*/ - create(contentLength, options) { - const operationArguments = { - contentLength, + AppendBlob.prototype.create = function (contentLength, options) { + var operationArguments = { + contentLength: contentLength, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); - } + }; /** * The Append Block operation commits a new block of data to the end of an existing append blob. The * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to @@ -75433,14 +76663,14 @@ class AppendBlob { * @param body Initial data * @param options The options parameters. */ - appendBlock(contentLength, body, options) { - const operationArguments = { - contentLength, - body, + AppendBlob.prototype.appendBlock = function (contentLength, body, options) { + var operationArguments = { + contentLength: contentLength, + body: body, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); - } + }; /** * The Append Block operation commits a new block of data to the end of an existing append blob where * the contents are read from a source url. The Append Block operation is permitted only if the blob @@ -75450,30 +76680,31 @@ class AppendBlob { * @param contentLength The length of the request. * @param options The options parameters. */ - appendBlockFromUrl(sourceUrl, contentLength, options) { - const operationArguments = { - sourceUrl, - contentLength, + AppendBlob.prototype.appendBlockFromUrl = function (sourceUrl, contentLength, options) { + var operationArguments = { + sourceUrl: sourceUrl, + contentLength: contentLength, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); - } + }; /** * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version * 2019-12-12 version or later. * @param options The options parameters. 
*/ - seal(options) { - const operationArguments = { + AppendBlob.prototype.seal = function (options) { + var operationArguments = { options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, sealOperationSpec); - } -} + }; + return AppendBlob; +}()); // Operation Specifications -const xmlSerializer$4 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ false); -const createOperationSpec$2 = { +var xmlSerializer$4 = new coreHttp.Serializer(Mappers, /* isXml */ true); +var serializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ false); +var createOperationSpec$2 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75515,7 +76746,7 @@ const createOperationSpec$2 = { isXML: true, serializer: xmlSerializer$4 }; -const appendBlockOperationSpec = { +var appendBlockOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75554,7 +76785,7 @@ const appendBlockOperationSpec = { mediaType: "binary", serializer: serializer$1 }; -const appendBlockFromUrlOperationSpec = { +var appendBlockFromUrlOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75598,7 +76829,7 @@ const appendBlockFromUrlOperationSpec = { isXML: true, serializer: xmlSerializer$4 }; -const sealOperationSpec = { +var sealOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75635,12 +76866,12 @@ const sealOperationSpec = { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class representing a BlockBlob. */ -class BlockBlob { +var BlockBlob = /** @class */ (function () { /** * Initialize a new instance of the class BlockBlob class. * @param client Reference to the service client */ - constructor(client) { + function BlockBlob(client) { this.client = client; } /** @@ -75652,14 +76883,14 @@ class BlockBlob { * @param body Initial data * @param options The options parameters. */ - upload(contentLength, body, options) { - const operationArguments = { - contentLength, - body, + BlockBlob.prototype.upload = function (contentLength, body, options) { + var operationArguments = { + contentLength: contentLength, + body: body, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); - } + }; /** * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are @@ -75673,14 +76904,14 @@ class BlockBlob { * access signature. * @param options The options parameters. */ - putBlobFromUrl(contentLength, copySource, options) { - const operationArguments = { - contentLength, - copySource, + BlockBlob.prototype.putBlobFromUrl = function (contentLength, copySource, options) { + var operationArguments = { + contentLength: contentLength, + copySource: copySource, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); - } + }; /** * The Stage Block operation creates a new block to be committed as part of a blob * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string @@ -75690,15 +76921,15 @@ class BlockBlob { * @param body Initial data * @param options The options parameters. 
*/ - stageBlock(blockId, contentLength, body, options) { - const operationArguments = { - blockId, - contentLength, - body, + BlockBlob.prototype.stageBlock = function (blockId, contentLength, body, options) { + var operationArguments = { + blockId: blockId, + contentLength: contentLength, + body: body, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); - } + }; /** * The Stage Block operation creates a new block to be committed as part of a blob where the contents * are read from a URL. @@ -75709,15 +76940,15 @@ class BlockBlob { * @param sourceUrl Specify a URL to the copy source. * @param options The options parameters. */ - stageBlockFromURL(blockId, contentLength, sourceUrl, options) { - const operationArguments = { - blockId, - contentLength, - sourceUrl, + BlockBlob.prototype.stageBlockFromURL = function (blockId, contentLength, sourceUrl, options) { + var operationArguments = { + blockId: blockId, + contentLength: contentLength, + sourceUrl: sourceUrl, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); - } + }; /** * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the * blob. In order to be written as part of a blob, a block must have been successfully written to the @@ -75729,13 +76960,13 @@ class BlockBlob { * @param blocks * @param options The options parameters. */ - commitBlockList(blocks, options) { - const operationArguments = { - blocks, + BlockBlob.prototype.commitBlockList = function (blocks, options) { + var operationArguments = { + blocks: blocks, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); - } + }; /** * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block * blob @@ -75743,18 +76974,19 @@ class BlockBlob { * blocks, or both lists together. * @param options The options parameters. 
*/ - getBlockList(listType, options) { - const operationArguments = { - listType, + BlockBlob.prototype.getBlockList = function (listType, options) { + var operationArguments = { + listType: listType, options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); - } -} + }; + return BlockBlob; +}()); // Operation Specifications -const xmlSerializer$5 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ false); -const uploadOperationSpec = { +var xmlSerializer$5 = new coreHttp.Serializer(Mappers, /* isXml */ true); +var serializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ false); +var uploadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75800,7 +77032,7 @@ const uploadOperationSpec = { mediaType: "binary", serializer: serializer$2 }; -const putBlobFromUrlOperationSpec = { +var putBlobFromUrlOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75852,7 +77084,7 @@ const putBlobFromUrlOperationSpec = { isXML: true, serializer: xmlSerializer$5 }; -const stageBlockOperationSpec = { +var stageBlockOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75888,7 +77120,7 @@ const stageBlockOperationSpec = { mediaType: "binary", serializer: serializer$2 }; -const stageBlockFromURLOperationSpec = { +var stageBlockFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75928,7 +77160,7 @@ const stageBlockFromURLOperationSpec = { isXML: true, serializer: xmlSerializer$5 }; -const commitBlockListOperationSpec = { +var commitBlockListOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75975,7 +77207,7 @@ const commitBlockListOperationSpec = { mediaType: "xml", serializer: xmlSerializer$5 }; -const getBlockListOperationSpec = { +var getBlockListOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -76010,23 +77242,23 @@ const getBlockListOperationSpec = { /** * The `@azure/logger` configuration for this package. */ -const logger = logger$1.createClientLogger("storage-blob"); +var logger = logger$1.createClientLogger("storage-blob"); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -const SDK_VERSION = "12.7.0"; -const SERVICE_VERSION = "2020-08-04"; -const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB -const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB -const BLOCK_BLOB_MAX_BLOCKS = 50000; -const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB -const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB -const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; +var SDK_VERSION = "12.6.0"; +var SERVICE_VERSION = "2020-08-04"; +var BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB +var BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB +var BLOCK_BLOB_MAX_BLOCKS = 50000; +var DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB +var DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB +var DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; /** * The OAuth scope to use with Azure Storage. 
*/ -const StorageOAuthScopes = "https://storage.azure.com/.default"; -const URLConstants = { +var StorageOAuthScopes = "https://storage.azure.com/.default"; +var URLConstants = { Parameters: { FORCE_BROWSER_NO_CACHE: "_", SIGNATURE: "sig", @@ -76035,14 +77267,14 @@ const URLConstants = { TIMEOUT: "timeout" } }; -const HTTPURLConnection = { +var HTTPURLConnection = { HTTP_ACCEPTED: 202, HTTP_CONFLICT: 409, HTTP_NOT_FOUND: 404, HTTP_PRECON_FAILED: 412, HTTP_RANGE_NOT_SATISFIABLE: 416 }; -const HeaderConstants = { +var HeaderConstants = { AUTHORIZATION: "Authorization", AUTHORIZATION_SCHEME: "Bearer", CONTENT_ENCODING: "Content-Encoding", @@ -76067,16 +77299,16 @@ const HeaderConstants = { X_MS_ERROR_CODE: "x-ms-error-code", X_MS_VERSION: "x-ms-version" }; -const ETagNone = ""; -const ETagAny = "*"; -const SIZE_1_MB = 1 * 1024 * 1024; -const BATCH_MAX_REQUEST = 256; -const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; -const HTTP_LINE_ENDING = "\r\n"; -const HTTP_VERSION_1_1 = "HTTP/1.1"; -const EncryptionAlgorithmAES25 = "AES256"; -const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; -const StorageBlobLoggingAllowedHeaderNames = [ +var ETagNone = ""; +var ETagAny = "*"; +var SIZE_1_MB = 1 * 1024 * 1024; +var BATCH_MAX_REQUEST = 256; +var BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; +var HTTP_LINE_ENDING = "\r\n"; +var HTTP_VERSION_1_1 = "HTTP/1.1"; +var EncryptionAlgorithmAES25 = "AES256"; +var DevelopmentConnectionString = "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;"; +var StorageBlobLoggingAllowedHeaderNames = [ "Access-Control-Allow-Origin", "Cache-Control", "Content-Length", @@ -76172,7 +77404,7 @@ const StorageBlobLoggingAllowedHeaderNames = [ "x-ms-if-tags", "x-ms-source-if-tags" ]; -const StorageBlobLoggingAllowedQueryParameters = [ +var StorageBlobLoggingAllowedQueryParameters = [ "comp", "maxresults", "rscc", @@ -76262,8 +77494,8 @@ const StorageBlobLoggingAllowedQueryParameters = [ * @param url - */ function escapeURLPath(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let path = urlParsed.getPath(); + var urlParsed = coreHttp.URLBuilder.parse(url); + var path = urlParsed.getPath(); path = path || "/"; path = escape(path); urlParsed.setPath(path); @@ -76272,11 +77504,12 @@ function escapeURLPath(url) { function getProxyUriFromDevConnString(connectionString) { // Development Connection String // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key - let proxyUri = ""; + var proxyUri = ""; if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri - const matchCredentials = connectionString.split(";"); - for (const element of matchCredentials) { + var matchCredentials = connectionString.split(";"); + for (var _i = 0, matchCredentials_1 = matchCredentials; _i < matchCredentials_1.length; _i++) { + var element = matchCredentials_1[_i]; if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; } @@ -76285,8 +77518,9 @@ function 
getProxyUriFromDevConnString(connectionString) { return proxyUri; } function getValueInConnString(connectionString, argument) { - const elements = connectionString.split(";"); - for (const element of elements) { + var elements = connectionString.split(";"); + for (var _i = 0, elements_1 = elements; _i < elements_1.length; _i++) { + var element = elements_1[_i]; if (element.trim().startsWith(argument)) { return element.trim().match(argument + "=(.*)")[1]; } @@ -76300,24 +77534,24 @@ function getValueInConnString(connectionString, argument) { * @returns String key value pairs of the storage account's url and credentials. */ function extractConnectionStringParts(connectionString) { - let proxyUri = ""; + var proxyUri = ""; if (connectionString.startsWith("UseDevelopmentStorage=true")) { // Development connection string proxyUri = getProxyUriFromDevConnString(connectionString); connectionString = DevelopmentConnectionString; } // Matching BlobEndpoint in the Account connection string - let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + var blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); // Slicing off '/' at the end if exists // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && connectionString.search("AccountKey=") !== -1) { // Account connection string - let defaultEndpointsProtocol = ""; - let accountName = ""; - let accountKey = Buffer.from("accountKey", "base64"); - let endpointSuffix = ""; + var defaultEndpointsProtocol = ""; + var accountName = ""; + var accountKey = Buffer.from("accountKey", "base64"); + var endpointSuffix = ""; // Get account name and key accountName = getValueInConnString(connectionString, "AccountName"); accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); @@ -76325,7 +77559,7 @@ function extractConnectionStringParts(connectionString) { // BlobEndpoint is not present in the Account connection string // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); - const protocol = defaultEndpointsProtocol.toLowerCase(); + var protocol = defaultEndpointsProtocol.toLowerCase(); if (protocol !== "https" && protocol !== "http") { throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); } @@ -76333,7 +77567,7 @@ function extractConnectionStringParts(connectionString) { if (!endpointSuffix) { throw new Error("Invalid EndpointSuffix in the provided Connection String"); } - blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + blobEndpoint = defaultEndpointsProtocol + "://" + accountName + ".blob." 
+ endpointSuffix; } if (!accountName) { throw new Error("Invalid AccountName in the provided Connection String"); @@ -76344,22 +77578,22 @@ function extractConnectionStringParts(connectionString) { return { kind: "AccountConnString", url: blobEndpoint, - accountName, - accountKey, - proxyUri + accountName: accountName, + accountKey: accountKey, + proxyUri: proxyUri }; } else { // SAS connection string - const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); - const accountName = getAccountNameFromUrl(blobEndpoint); + var accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + var accountName = getAccountNameFromUrl(blobEndpoint); if (!blobEndpoint) { throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); } else if (!accountSas) { throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); } - return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + return { kind: "SASConnString", url: blobEndpoint, accountName: accountName, accountSas: accountSas }; } } /** @@ -76383,9 +77617,9 @@ function escape(text) { * @returns An updated URL string */ function appendToURLPath(url, name) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let path = urlParsed.getPath(); - path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name; + var urlParsed = coreHttp.URLBuilder.parse(url); + var path = urlParsed.getPath(); + path = path ? (path.endsWith("/") ? "" + path + name : path + "/" + name) : name; urlParsed.setPath(path); return urlParsed.toString(); } @@ -76399,7 +77633,7 @@ function appendToURLPath(url, name) { * @returns An updated URL string */ function setURLParameter(url, name, value) { - const urlParsed = coreHttp.URLBuilder.parse(url); + var urlParsed = coreHttp.URLBuilder.parse(url); urlParsed.setQueryParameter(name, value); return urlParsed.toString(); } @@ -76410,7 +77644,7 @@ function setURLParameter(url, name, value) { * @param name - */ function getURLParameter(url, name) { - const urlParsed = coreHttp.URLBuilder.parse(url); + var urlParsed = coreHttp.URLBuilder.parse(url); return urlParsed.getQueryParameterValue(name); } /** @@ -76421,7 +77655,7 @@ function getURLParameter(url, name) { * @returns An updated URL string */ function setURLHost(url, host) { - const urlParsed = coreHttp.URLBuilder.parse(url); + var urlParsed = coreHttp.URLBuilder.parse(url); urlParsed.setHost(host); return urlParsed.toString(); } @@ -76431,7 +77665,7 @@ function setURLHost(url, host) { * @param url - Source URL string */ function getURLPath(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); + var urlParsed = coreHttp.URLBuilder.parse(url); return urlParsed.getPath(); } /** @@ -76440,7 +77674,7 @@ function getURLPath(url) { * @param url - Source URL string */ function getURLScheme(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); + var urlParsed = coreHttp.URLBuilder.parse(url); return urlParsed.getScheme(); } /** @@ -76449,17 +77683,17 @@ function getURLScheme(url) { * @param url - Source URL string */ function getURLPathAndQuery(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - const pathString = urlParsed.getPath(); + var urlParsed = coreHttp.URLBuilder.parse(url); + var pathString = urlParsed.getPath(); if (!pathString) { throw new RangeError("Invalid url without valid path."); } - let queryString = urlParsed.getQuery() || ""; + var queryString = urlParsed.getQuery() || ""; queryString = queryString.trim(); if (queryString != "") 
{ - queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + queryString = queryString.startsWith("?") ? queryString : "?" + queryString; // Ensure query string start with '?' } - return `${pathString}${queryString}`; + return "" + pathString + queryString; } /** * Get URL query key value pairs from an URL string. @@ -76467,23 +77701,24 @@ function getURLPathAndQuery(url) { * @param url - */ function getURLQueries(url) { - let queryString = coreHttp.URLBuilder.parse(url).getQuery(); + var queryString = coreHttp.URLBuilder.parse(url).getQuery(); if (!queryString) { return {}; } queryString = queryString.trim(); queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString; - let querySubStrings = queryString.split("&"); - querySubStrings = querySubStrings.filter((value) => { - const indexOfEqual = value.indexOf("="); - const lastIndexOfEqual = value.lastIndexOf("="); + var querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter(function (value) { + var indexOfEqual = value.indexOf("="); + var lastIndexOfEqual = value.lastIndexOf("="); return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); }); - const queries = {}; - for (const querySubString of querySubStrings) { - const splitResults = querySubString.split("="); - const key = splitResults[0]; - const value = splitResults[1]; + var queries = {}; + for (var _i = 0, querySubStrings_1 = querySubStrings; _i < querySubStrings_1.length; _i++) { + var querySubString = querySubStrings_1[_i]; + var splitResults = querySubString.split("="); + var key = splitResults[0]; + var value = splitResults[1]; queries[key] = value; } return queries; @@ -76496,8 +77731,8 @@ function getURLQueries(url) { * @returns An updated URL string. */ function appendToURLQuery(url, queryParts) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let query = urlParsed.getQuery(); + var urlParsed = coreHttp.URLBuilder.parse(url); + var query = urlParsed.getQuery(); if (query) { query += "&" + queryParts; } @@ -76515,9 +77750,10 @@ function appendToURLQuery(url, queryParts) { * If false, YYYY-MM-DDThh:mm:ssZ will be returned. * @returns Date string in ISO8061 format, with or without 7 milliseconds component */ -function truncatedISO8061Date(date, withMilliseconds = true) { +function truncatedISO8061Date(date, withMilliseconds) { + if (withMilliseconds === void 0) { withMilliseconds = true; } // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" - const dateString = date.toISOString(); + var dateString = date.toISOString(); return withMilliseconds ? 
dateString.substring(0, dateString.length - 1) + "0000" + "Z" : dateString.substring(0, dateString.length - 5) + "Z"; @@ -76537,14 +77773,14 @@ function base64encode(content) { */ function generateBlockID(blockIDPrefix, blockIndex) { // To generate a 64 bytes base64 string, source string should be 48 - const maxSourceStringLength = 48; + var maxSourceStringLength = 48; // A blob can have a maximum of 100,000 uncommitted blocks at any given time - const maxBlockIndexLength = 6; - const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + var maxBlockIndexLength = 6; + var maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); } - const res = blockIDPrefix + + var res = blockIDPrefix + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); return base64encode(res); } @@ -76555,25 +77791,29 @@ function generateBlockID(blockIDPrefix, blockIndex) { * @param aborter - * @param abortError - */ -async function delay(timeInMs, aborter, abortError) { - return new Promise((resolve, reject) => { - let timeout; - const abortHandler = () => { - if (timeout !== undefined) { - clearTimeout(timeout); - } - reject(abortError); - }; - const resolveHandler = () => { - if (aborter !== undefined) { - aborter.removeEventListener("abort", abortHandler); - } - resolve(); - }; - timeout = setTimeout(resolveHandler, timeInMs); - if (aborter !== undefined) { - aborter.addEventListener("abort", abortHandler); - } +function delay(timeInMs, aborter, abortError) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, new Promise(function (resolve, reject) { + var timeout; + var abortHandler = function () { + if (timeout !== undefined) { + clearTimeout(timeout); + } + reject(abortError); + }; + var resolveHandler = function () { + if (aborter !== undefined) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== undefined) { + aborter.addEventListener("abort", abortHandler); + } + })]; + }); }); } /** @@ -76583,7 +77823,8 @@ async function delay(timeInMs, aborter, abortError) { * @param targetLength - * @param padString - */ -function padStart(currentString, targetLength, padString = " ") { +function padStart(currentString, targetLength, padString) { + if (padString === void 0) { padString = " "; } // TS doesn't know this code needs to run downlevel sometimes. // @ts-expect-error if (String.prototype.padStart) { @@ -76616,8 +77857,8 @@ function iEqual(str1, str2) { * @returns with the account name */ function getAccountNameFromUrl(url) { - const parsedUrl = coreHttp.URLBuilder.parse(url); - let accountName; + var parsedUrl = coreHttp.URLBuilder.parse(url); + var accountName; try { if (parsedUrl.getHost().split(".")[1] === "blob") { // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; @@ -76643,7 +77884,7 @@ function isIpEndpointStyle(parsedUrl) { if (parsedUrl.getHost() == undefined) { return false; } - const host = parsedUrl.getHost() + (parsedUrl.getPort() == undefined ? "" : ":" + parsedUrl.getPort()); + var host = parsedUrl.getHost() + (parsedUrl.getPort() == undefined ? "" : ":" + parsedUrl.getPort()); // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. 
// Case 2: localhost(:port), use broad regex to match port part. // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. @@ -76659,11 +77900,11 @@ function toBlobTagsString(tags) { if (tags === undefined) { return undefined; } - const tagPairs = []; - for (const key in tags) { + var tagPairs = []; + for (var key in tags) { if (tags.hasOwnProperty(key)) { - const value = tags[key]; - tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + var value = tags[key]; + tagPairs.push(encodeURIComponent(key) + "=" + encodeURIComponent(value)); } } return tagPairs.join("&"); @@ -76677,15 +77918,15 @@ function toBlobTags(tags) { if (tags === undefined) { return undefined; } - const res = { + var res = { blobTagSet: [] }; - for (const key in tags) { + for (var key in tags) { if (tags.hasOwnProperty(key)) { - const value = tags[key]; + var value = tags[key]; res.blobTagSet.push({ - key, - value + key: key, + value: value }); } } @@ -76700,8 +77941,9 @@ function toTags(tags) { if (tags === undefined) { return undefined; } - const res = {}; - for (const blobTag of tags.blobTagSet) { + var res = {}; + for (var _i = 0, _a = tags.blobTagSet; _i < _a.length; _i++) { + var blobTag = _a[_i]; res[blobTag.key] = blobTag.value; } return res; @@ -76760,18 +78002,18 @@ function parseObjectReplicationRecord(objectReplicationRecord) { // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. return undefined; } - const orProperties = []; - for (const key in objectReplicationRecord) { - const ids = key.split("_"); - const policyPrefix = "or-"; + var orProperties = []; + var _loop_1 = function (key) { + var ids = key.split("_"); + var policyPrefix = "or-"; if (ids[0].startsWith(policyPrefix)) { ids[0] = ids[0].substring(policyPrefix.length); } - const rule = { + var rule = { ruleId: ids[1], replicationStatus: objectReplicationRecord[key] }; - const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + var policyIndex = orProperties.findIndex(function (policy) { return policy.policyId === ids[0]; }); if (policyIndex > -1) { orProperties[policyIndex].rules.push(rule); } @@ -76781,6 +78023,9 @@ function parseObjectReplicationRecord(objectReplicationRecord) { rules: [rule] }); } + }; + for (var key in objectReplicationRecord) { + _loop_1(key); } return orProperties; } @@ -76807,42 +78052,51 @@ function attachCredential(thing, credential) { * * 3. Remove content-length header to avoid browsers warning */ -class StorageBrowserPolicy extends coreHttp.BaseRequestPolicy { +var StorageBrowserPolicy = /** @class */ (function (_super) { + tslib.__extends(StorageBrowserPolicy, _super); /** * Creates an instance of StorageBrowserPolicy. * @param nextPolicy - * @param options - */ - constructor(nextPolicy, options) { - super(nextPolicy, options); + function StorageBrowserPolicy(nextPolicy, options) { + return _super.call(this, nextPolicy, options) || this; } /** * Sends out request. * * @param request - */ - async sendRequest(request) { - { - return this._nextPolicy.sendRequest(request); - } - } -} + StorageBrowserPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + { + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + } + }); + }); + }; + return StorageBrowserPolicy; +}(coreHttp.BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. 
/** * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. */ -class StorageBrowserPolicyFactory { +var StorageBrowserPolicyFactory = /** @class */ (function () { + function StorageBrowserPolicyFactory() { + } /** * Creates a StorageBrowserPolicyFactory object. * * @param nextPolicy - * @param options - */ - create(nextPolicy, options) { + StorageBrowserPolicyFactory.prototype.create = function (nextPolicy, options) { return new StorageBrowserPolicy(nextPolicy, options); - } -} + }; + return StorageBrowserPolicyFactory; +}()); // Copyright (c) Microsoft Corporation. (function (StorageRetryPolicyType) { @@ -76856,7 +78110,7 @@ class StorageBrowserPolicyFactory { StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; })(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); // Default values of StorageRetryOptions -const DEFAULT_RETRY_OPTIONS = { +var DEFAULT_RETRY_OPTIONS = { maxRetryDelayInMs: 120 * 1000, maxTries: 4, retryDelayInMs: 4 * 1000, @@ -76864,11 +78118,12 @@ const DEFAULT_RETRY_OPTIONS = { secondaryHost: "", tryTimeoutInMs: undefined // Use server side default timeout strategy }; -const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); +var RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); /** * Retry policy with exponential retry and linear retry implemented. */ -class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { +var StorageRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(StorageRetryPolicy, _super); /** * Creates an instance of RetryPolicy. * @@ -76876,10 +78131,11 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { * @param options - * @param retryOptions - */ - constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { - super(nextPolicy, options); + function StorageRetryPolicy(nextPolicy, options, retryOptions) { + if (retryOptions === void 0) { retryOptions = DEFAULT_RETRY_OPTIONS; } + var _this = _super.call(this, nextPolicy, options) || this; // Initialize retry options - this.retryOptions = { + _this.retryOptions = { retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS.retryPolicyType, @@ -76901,15 +78157,20 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS.secondaryHost }; + return _this; } /** * Sends request. * * @param request - */ - async sendRequest(request) { - return this.attemptSendRequest(request, false, 1); - } + StorageRetryPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, this.attemptSendRequest(request, false, 1)]; + }); + }); + }; /** * Decide and perform next retry. Won't mutate request parameter. * @@ -76920,37 +78181,52 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { * @param attempt - How many retries has been attempted to performed, starting from 1, which includes * the attempt will be performed by this method call. 
*/ - async attemptSendRequest(request, secondaryHas404, attempt) { - const newRequest = request.clone(); - const isPrimaryRetry = secondaryHas404 || - !this.retryOptions.secondaryHost || - !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || - attempt % 2 === 1; - if (!isPrimaryRetry) { - newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); - } - // Set the server-side timeout query parameter "timeout=[seconds]" - if (this.retryOptions.tryTimeoutInMs) { - newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); - } - let response; - try { - logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); - response = await this._nextPolicy.sendRequest(newRequest); - if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { - return response; - } - secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); - } - catch (err) { - logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); - if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { - throw err; - } - } - await this.delay(isPrimaryRetry, attempt, request.abortSignal); - return await this.attemptSendRequest(request, secondaryHas404, ++attempt); - } + StorageRetryPolicy.prototype.attemptSendRequest = function (request, secondaryHas404, attempt) { + return tslib.__awaiter(this, void 0, void 0, function () { + var newRequest, isPrimaryRetry, response, err_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + newRequest = request.clone(); + isPrimaryRetry = secondaryHas404 || + !this.retryOptions.secondaryHost || + !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || + attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); + } + // Set the server-side timeout query parameter "timeout=[seconds]" + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); + } + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + logger.info("RetryPolicy: =====> Try=" + attempt + " " + (isPrimaryRetry ? "Primary" : "Secondary")); + return [4 /*yield*/, this._nextPolicy.sendRequest(newRequest)]; + case 2: + response = _a.sent(); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return [2 /*return*/, response]; + } + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + return [3 /*break*/, 4]; + case 3: + err_1 = _a.sent(); + logger.error("RetryPolicy: Caught error, message: " + err_1.message + ", code: " + err_1.code); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err_1)) { + throw err_1; + } + return [3 /*break*/, 4]; + case 4: return [4 /*yield*/, this.delay(isPrimaryRetry, attempt, request.abortSignal)]; + case 5: + _a.sent(); + return [4 /*yield*/, this.attemptSendRequest(request, secondaryHas404, ++attempt)]; + case 6: return [2 /*return*/, _a.sent()]; + } + }); + }); + }; /** * Decide whether to retry according to last HTTP response and retry counters. 
* @@ -76959,15 +78235,15 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { * @param response - * @param err - */ - shouldRetry(isPrimaryRetry, attempt, response, err) { + StorageRetryPolicy.prototype.shouldRetry = function (isPrimaryRetry, attempt, response, err) { if (attempt >= this.retryOptions.maxTries) { - logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions - .maxTries}, no further try.`); + logger.info("RetryPolicy: Attempt(s) " + attempt + " >= maxTries " + this.retryOptions + .maxTries + ", no further try."); return false; } // Handle network failures, you may need to customize the list when you implement // your own http client - const retriableErrors = [ + var retriableErrors = [ "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", @@ -76979,11 +78255,12 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { "REQUEST_SEND_ERROR" // For default xhr based http client provided in ms-rest-js ]; if (err) { - for (const retriableError of retriableErrors) { + for (var _i = 0, retriableErrors_1 = retriableErrors; _i < retriableErrors_1.length; _i++) { + var retriableError = retriableErrors_1[_i]; if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || (err.code && err.code.toString().toUpperCase() === retriableError)) { - logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + logger.info("RetryPolicy: Network error " + retriableError + " found, will retry."); return true; } } @@ -76992,23 +78269,23 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { // the resource was not found. This may be due to replication delay. So, in this // case, we'll never try the secondary again for this operation. if (response || err) { - const statusCode = response ? response.status : err ? err.statusCode : 0; + var statusCode = response ? response.status : err ? err.statusCode : 0; if (!isPrimaryRetry && statusCode === 404) { - logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + logger.info("RetryPolicy: Secondary access with 404, will retry."); return true; } // Server internal error or server timeout if (statusCode === 503 || statusCode === 500) { - logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + logger.info("RetryPolicy: Will retry for status code " + statusCode + "."); return true; } } - if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith("Error \"Error: Unclosed root tag"))) { logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); return true; } return false; - } + }; /** * Delay a calculated time between retries. 
* @@ -77016,36 +78293,42 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { * @param attempt - * @param abortSignal - */ - async delay(isPrimaryRetry, attempt, abortSignal) { - let delayTimeInMs = 0; - if (isPrimaryRetry) { - switch (this.retryOptions.retryPolicyType) { - case exports.StorageRetryPolicyType.EXPONENTIAL: - delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); - break; - case exports.StorageRetryPolicyType.FIXED: - delayTimeInMs = this.retryOptions.retryDelayInMs; - break; - } - } - else { - delayTimeInMs = Math.random() * 1000; - } - logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); - return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); - } -} + StorageRetryPolicy.prototype.delay = function (isPrimaryRetry, attempt, abortSignal) { + return tslib.__awaiter(this, void 0, void 0, function () { + var delayTimeInMs; + return tslib.__generator(this, function (_a) { + delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case exports.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case exports.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info("RetryPolicy: Delay for " + delayTimeInMs + "ms"); + return [2 /*return*/, delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR)]; + }); + }); + }; + return StorageRetryPolicy; +}(coreHttp.BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. /** * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. */ -class StorageRetryPolicyFactory { +var StorageRetryPolicyFactory = /** @class */ (function () { /** * Creates an instance of StorageRetryPolicyFactory. * @param retryOptions - */ - constructor(retryOptions) { + function StorageRetryPolicyFactory(retryOptions) { this.retryOptions = retryOptions; } /** @@ -77054,53 +78337,61 @@ class StorageRetryPolicyFactory { * @param nextPolicy - * @param options - */ - create(nextPolicy, options) { + StorageRetryPolicyFactory.prototype.create = function (nextPolicy, options) { return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); - } -} + }; + return StorageRetryPolicyFactory; +}()); // Copyright (c) Microsoft Corporation. /** * Credential policy used to sign HTTP(S) requests before sending. This is an * abstract class. */ -class CredentialPolicy extends coreHttp.BaseRequestPolicy { +var CredentialPolicy = /** @class */ (function (_super) { + tslib.__extends(CredentialPolicy, _super); + function CredentialPolicy() { + return _super !== null && _super.apply(this, arguments) || this; + } /** * Sends out request. * * @param request - */ - sendRequest(request) { + CredentialPolicy.prototype.sendRequest = function (request) { return this._nextPolicy.sendRequest(this.signRequest(request)); - } + }; /** * Child classes must implement this method with request signing. This method * will be executed in {@link sendRequest}. * * @param request - */ - signRequest(request) { + CredentialPolicy.prototype.signRequest = function (request) { // Child classes must override this method with request signing. This method // will be executed in sendRequest(). 
return request; - } -} + }; + return CredentialPolicy; +}(coreHttp.BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. /** * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources * or for use with Shared Access Signatures (SAS). */ -class AnonymousCredentialPolicy extends CredentialPolicy { +var AnonymousCredentialPolicy = /** @class */ (function (_super) { + tslib.__extends(AnonymousCredentialPolicy, _super); /** * Creates an instance of AnonymousCredentialPolicy. * @param nextPolicy - * @param options - */ - constructor(nextPolicy, options) { - super(nextPolicy, options); + function AnonymousCredentialPolicy(nextPolicy, options) { + return _super.call(this, nextPolicy, options) || this; } -} + return AnonymousCredentialPolicy; +}(CredentialPolicy)); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -77108,21 +78399,24 @@ class AnonymousCredentialPolicy extends CredentialPolicy { * Credential is an abstract class for Azure Storage HTTP requests signing. This * class will host an credentialPolicyCreator factory which generates CredentialPolicy. */ -class Credential { +var Credential = /** @class */ (function () { + function Credential() { + } /** * Creates a RequestPolicy object. * * @param _nextPolicy - * @param _options - */ - create( + Credential.prototype.create = function ( // tslint:disable-next-line:variable-name _nextPolicy, // tslint:disable-next-line:variable-name _options) { throw new Error("Method should be implemented in children classes."); - } -} + }; + return Credential; +}()); // Copyright (c) Microsoft Corporation. /** @@ -77131,76 +78425,88 @@ class Credential { * HTTP(S) requests that read public resources or for use with Shared Access * Signatures (SAS). */ -class AnonymousCredential extends Credential { +var AnonymousCredential = /** @class */ (function (_super) { + tslib.__extends(AnonymousCredential, _super); + function AnonymousCredential() { + return _super !== null && _super.apply(this, arguments) || this; + } /** * Creates an {@link AnonymousCredentialPolicy} object. * * @param nextPolicy - * @param options - */ - create(nextPolicy, options) { + AnonymousCredential.prototype.create = function (nextPolicy, options) { return new AnonymousCredentialPolicy(nextPolicy, options); - } -} + }; + return AnonymousCredential; +}(Credential)); // Copyright (c) Microsoft Corporation. /** * TelemetryPolicy is a policy used to tag user-agent header for every requests. */ -class TelemetryPolicy extends coreHttp.BaseRequestPolicy { +var TelemetryPolicy = /** @class */ (function (_super) { + tslib.__extends(TelemetryPolicy, _super); /** * Creates an instance of TelemetryPolicy. * @param nextPolicy - * @param options - * @param telemetry - */ - constructor(nextPolicy, options, telemetry) { - super(nextPolicy, options); - this.telemetry = telemetry; + function TelemetryPolicy(nextPolicy, options, telemetry) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.telemetry = telemetry; + return _this; } /** * Sends out request. 
* * @param request - */ - async sendRequest(request) { - { - if (!request.headers) { - request.headers = new coreHttp.HttpHeaders(); - } - if (!request.headers.get(HeaderConstants.USER_AGENT)) { - request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); - } - } - return this._nextPolicy.sendRequest(request); - } -} + TelemetryPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + { + if (!request.headers) { + request.headers = new coreHttp.HttpHeaders(); + } + if (!request.headers.get(HeaderConstants.USER_AGENT)) { + request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); + } + } + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + }); + }); + }; + return TelemetryPolicy; +}(coreHttp.BaseRequestPolicy)); // Copyright (c) Microsoft Corporation. /** * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects. */ -class TelemetryPolicyFactory { +var TelemetryPolicyFactory = /** @class */ (function () { /** * Creates an instance of TelemetryPolicyFactory. * @param telemetry - */ - constructor(telemetry) { - const userAgentInfo = []; + function TelemetryPolicyFactory(telemetry) { + var userAgentInfo = []; { if (telemetry) { - const telemetryString = telemetry.userAgentPrefix || ""; + var telemetryString = telemetry.userAgentPrefix || ""; if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { userAgentInfo.push(telemetryString); } } // e.g. azsdk-js-storageblob/10.0.0 - const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; + var libInfo = "azsdk-js-storageblob/" + SDK_VERSION; if (userAgentInfo.indexOf(libInfo) === -1) { userAgentInfo.push(libInfo); } // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) - const runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`; + var runtimeInfo = "(NODE-VERSION " + process.version + "; " + os.type() + " " + os.release() + ")"; if (userAgentInfo.indexOf(runtimeInfo) === -1) { userAgentInfo.push(runtimeInfo); } @@ -77213,13 +78519,14 @@ class TelemetryPolicyFactory { * @param nextPolicy - * @param options - */ - create(nextPolicy, options) { + TelemetryPolicyFactory.prototype.create = function (nextPolicy, options) { return new TelemetryPolicy(nextPolicy, options, this.telemetryString); - } -} + }; + return TelemetryPolicyFactory; +}()); // Copyright (c) Microsoft Corporation. -const _defaultHttpClient = new coreHttp.DefaultHttpClient(); +var _defaultHttpClient = new coreHttp.DefaultHttpClient(); function getCachedDefaultHttpClient() { return _defaultHttpClient; } @@ -77233,18 +78540,19 @@ function getCachedDefaultHttpClient() { * Refer to {@link newPipeline} and provided policies before implementing your * customized Pipeline. */ -class Pipeline { +var Pipeline = /** @class */ (function () { /** * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. * * @param factories - * @param options - */ - constructor(factories, options = {}) { + function Pipeline(factories, options) { + if (options === void 0) { options = {}; } this.factories = factories; // when options.httpClient is not specified, passing in a DefaultHttpClient instance to // avoid each client creating its own http client. 
- this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); + this.options = tslib.__assign(tslib.__assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); } /** * Transfer Pipeline object to ServiceClientOptions object which is required by @@ -77252,13 +78560,14 @@ class Pipeline { * * @returns The ServiceClientOptions object from this Pipeline. */ - toServiceClientOptions() { + Pipeline.prototype.toServiceClientOptions = function () { return { httpClient: this.options.httpClient, requestPolicyFactories: this.factories }; - } -} + }; + return Pipeline; +}()); /** * Creates a new Pipeline object with Credential provided. * @@ -77266,15 +78575,16 @@ class Pipeline { * @param pipelineOptions - Optional. Options. * @returns A new Pipeline object. */ -function newPipeline(credential, pipelineOptions = {}) { +function newPipeline(credential, pipelineOptions) { + if (pipelineOptions === void 0) { pipelineOptions = {}; } if (credential === undefined) { credential = new AnonymousCredential(); } // Order is important. Closer to the API at the top & closer to the network at the bottom. // The credential's policy factory must appear close to the wire so it can sign any // changes made by other factories (like UniqueRequestIDPolicyFactory) - const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); - const factories = [ + var telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); + var factories = [ coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions), telemetryPolicy, @@ -77306,28 +78616,30 @@ function newPipeline(credential, pipelineOptions = {}) { /** * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. */ -class StorageSharedKeyCredentialPolicy extends CredentialPolicy { +var StorageSharedKeyCredentialPolicy = /** @class */ (function (_super) { + tslib.__extends(StorageSharedKeyCredentialPolicy, _super); /** * Creates an instance of StorageSharedKeyCredentialPolicy. * @param nextPolicy - * @param options - * @param factory - */ - constructor(nextPolicy, options, factory) { - super(nextPolicy, options); - this.factory = factory; + function StorageSharedKeyCredentialPolicy(nextPolicy, options, factory) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.factory = factory; + return _this; } /** * Signs request. 
* * @param request - */ - signRequest(request) { + StorageSharedKeyCredentialPolicy.prototype.signRequest = function (request) { request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); if (request.body && typeof request.body === "string" && request.body.length > 0) { request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } - const stringToSign = [ + var stringToSign = [ request.method.toUpperCase(), this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), @@ -77344,14 +78656,14 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { "\n" + this.getCanonicalizedHeadersString(request) + this.getCanonicalizedResourceString(request); - const signature = this.factory.computeHMACSHA256(stringToSign); - request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + var signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, "SharedKey " + this.factory.accountName + ":" + signature); // console.log(`[URL]:${request.url}`); // console.log(`[HEADERS]:${request.headers.toString()}`); // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); return request; - } + }; /** * Retrieve header value according to shared key sign rules. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key @@ -77359,8 +78671,8 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { * @param request - * @param headerName - */ - getHeaderValueToSign(request, headerName) { - const value = request.headers.get(headerName); + StorageSharedKeyCredentialPolicy.prototype.getHeaderValueToSign = function (request, headerName) { + var value = request.headers.get(headerName); if (!value) { return ""; } @@ -77371,7 +78683,7 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { return ""; } return value; - } + }; /** * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. 
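// Reviewer note (illustrative sketch, not part of the vendored bundle): the shared-key
// hunks keep the same signing scheme; only the syntax is downleveled. For spot-checking,
// a minimal Node sketch of the final signing step - HMAC-SHA256 over the canonical
// string-to-sign with the base64-decoded account key, mirroring
// StorageSharedKeyCredential.computeHMACSHA256 later in this file (accountKeyBase64 and
// stringToSign are placeholder inputs):
var crypto = require("crypto");
function computeSharedKeySignatureSketch(accountKeyBase64, stringToSign) {
    // decode the account key, HMAC the UTF-8 string-to-sign, return a base64 digest;
    // signRequest above then sets "SharedKey <accountName>:<signature>" as Authorization
    return crypto
        .createHmac("sha256", Buffer.from(accountKeyBase64, "base64"))
        .update(stringToSign, "utf8")
        .digest("base64");
}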
@@ -77385,56 +78697,58 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { * * @param request - */ - getCanonicalizedHeadersString(request) { - let headersArray = request.headers.headersArray().filter((value) => { + StorageSharedKeyCredentialPolicy.prototype.getCanonicalizedHeadersString = function (request) { + var headersArray = request.headers.headersArray().filter(function (value) { return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); }); - headersArray.sort((a, b) => { + headersArray.sort(function (a, b) { return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); }); // Remove duplicate headers - headersArray = headersArray.filter((value, index, array) => { + headersArray = headersArray.filter(function (value, index, array) { if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { return false; } return true; }); - let canonicalizedHeadersStringToSign = ""; - headersArray.forEach((header) => { - canonicalizedHeadersStringToSign += `${header.name + var canonicalizedHeadersStringToSign = ""; + headersArray.forEach(function (header) { + canonicalizedHeadersStringToSign += header.name .toLowerCase() - .trimRight()}:${header.value.trimLeft()}\n`; + .trimRight() + ":" + header.value.trimLeft() + "\n"; }); return canonicalizedHeadersStringToSign; - } + }; /** * Retrieves the webResource canonicalized resource string. * * @param request - */ - getCanonicalizedResourceString(request) { - const path = getURLPath(request.url) || "/"; - let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path}`; - const queries = getURLQueries(request.url); - const lowercaseQueries = {}; + StorageSharedKeyCredentialPolicy.prototype.getCanonicalizedResourceString = function (request) { + var path = getURLPath(request.url) || "/"; + var canonicalizedResourceString = ""; + canonicalizedResourceString += "/" + this.factory.accountName + path; + var queries = getURLQueries(request.url); + var lowercaseQueries = {}; if (queries) { - const queryKeys = []; - for (const key in queries) { + var queryKeys = []; + for (var key in queries) { if (queries.hasOwnProperty(key)) { - const lowercaseKey = key.toLowerCase(); + var lowercaseKey = key.toLowerCase(); lowercaseQueries[lowercaseKey] = queries[key]; queryKeys.push(lowercaseKey); } } queryKeys.sort(); - for (const key of queryKeys) { - canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + for (var _i = 0, queryKeys_1 = queryKeys; _i < queryKeys_1.length; _i++) { + var key = queryKeys_1[_i]; + canonicalizedResourceString += "\n" + key + ":" + decodeURIComponent(lowercaseQueries[key]); } } return canonicalizedResourceString; - } -} + }; + return StorageSharedKeyCredentialPolicy; +}(CredentialPolicy)); // Copyright (c) Microsoft Corporation. /** @@ -77442,16 +78756,18 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { * * StorageSharedKeyCredential for account key authorization of Azure Storage service. */ -class StorageSharedKeyCredential extends Credential { +var StorageSharedKeyCredential = /** @class */ (function (_super) { + tslib.__extends(StorageSharedKeyCredential, _super); /** * Creates an instance of StorageSharedKeyCredential. 
* @param accountName - * @param accountKey - */ - constructor(accountName, accountKey) { - super(); - this.accountName = accountName; - this.accountKey = Buffer.from(accountKey, "base64"); + function StorageSharedKeyCredential(accountName, accountKey) { + var _this = _super.call(this) || this; + _this.accountName = accountName; + _this.accountKey = Buffer.from(accountKey, "base64"); + return _this; } /** * Creates a StorageSharedKeyCredentialPolicy object. @@ -77459,20 +78775,21 @@ class StorageSharedKeyCredential extends Credential { * @param nextPolicy - * @param options - */ - create(nextPolicy, options) { + StorageSharedKeyCredential.prototype.create = function (nextPolicy, options) { return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); - } + }; /** * Generates a hash signature for an HTTP request or for a SAS. * * @param stringToSign - */ - computeHMACSHA256(stringToSign) { + StorageSharedKeyCredential.prototype.computeHMACSHA256 = function (stringToSign) { return crypto.createHmac("sha256", this.accountKey) .update(stringToSign, "utf8") .digest("base64"); - } -} + }; + return StorageSharedKeyCredential; +}(Credential)); /* * Copyright (c) Microsoft Corporation. @@ -77481,16 +78798,18 @@ class StorageSharedKeyCredential extends Credential { * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -const packageName = "azure-storage-blob"; -const packageVersion = "12.6.0-beta.1"; -class StorageClientContext extends coreHttp.ServiceClient { +var packageName = "azure-storage-blob"; +var packageVersion = "12.6.0-beta.1"; +var StorageClientContext = /** @class */ (function (_super) { + tslib.__extends(StorageClientContext, _super); /** * Initializes a new instance of the StorageClientContext class. * @param url The URL of the service account, container, or blob that is the targe of the desired * operation. * @param options The parameter options */ - constructor(url, options) { + function StorageClientContext(url, options) { + var _this = this; if (url === undefined) { throw new Error("'url' cannot be null"); } @@ -77499,31 +78818,33 @@ class StorageClientContext extends coreHttp.ServiceClient { options = {}; } if (!options.userAgent) { - const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); - options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + var defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + options.userAgent = packageName + "/" + packageVersion + " " + defaultUserAgent; } - super(undefined, options); - this.requestContentType = "application/json; charset=utf-8"; - this.baseUri = options.endpoint || "{url}"; + _this = _super.call(this, undefined, options) || this; + _this.requestContentType = "application/json; charset=utf-8"; + _this.baseUri = options.endpoint || "{url}"; // Parameter assignments - this.url = url; + _this.url = url; // Assigning values to Constant parameters - this.version = options.version || "2020-08-04"; + _this.version = options.version || "2020-08-04"; + return _this; } -} + return StorageClientContext; +}(coreHttp.ServiceClient)); // Copyright (c) Microsoft Corporation. /** * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} * and etc. */ -class StorageClient { +var StorageClient = /** @class */ (function () { /** * Creates an instance of StorageClient. * @param url - url to resource * @param pipeline - request policy pipeline. 
*/ - constructor(url, pipeline) { + function StorageClient(url, pipeline) { // URL should be encoded and only once, protocol layer shouldn't encode URL again this.url = escapeURLPath(url); this.accountName = getAccountNameFromUrl(url); @@ -77531,7 +78852,8 @@ class StorageClient { this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); this.credential = new AnonymousCredential(); - for (const factory of this.pipeline.factories) { + for (var _i = 0, _a = this.pipeline.factories; _i < _a.length; _i++) { + var factory = _a[_i]; if ((coreHttp.isNode && factory instanceof StorageSharedKeyCredential) || factory instanceof AnonymousCredential) { this.credential = factory; @@ -77543,17 +78865,18 @@ class StorageClient { } } // Override protocol layer's default content-type - const storageClientContext = this.storageClientContext; + var storageClientContext = this.storageClientContext; storageClientContext.requestContentType = undefined; } -} + return StorageClient; +}()); // Copyright (c) Microsoft Corporation. /** * Creates a span using the global tracer. * @internal */ -const createSpan = coreTracing.createSpanFunction({ +var createSpan = coreTracing.createSpanFunction({ packagePrefix: "Azure.Storage.Blob", namespace: "Microsoft.Storage" }); @@ -77583,8 +78906,8 @@ function convertTracingToRequestOptionsBase(options) { * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but * the order of the permissions is particular and this class guarantees correctness. */ -class BlobSASPermissions { - constructor() { +var BlobSASPermissions = /** @class */ (function () { + function BlobSASPermissions() { /** * Specifies Read access granted. */ @@ -77628,9 +78951,10 @@ class BlobSASPermissions { * * @param permissions - */ - static parse(permissions) { - const blobSASPermissions = new BlobSASPermissions(); - for (const char of permissions) { + BlobSASPermissions.parse = function (permissions) { + var blobSASPermissions = new BlobSASPermissions(); + for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) { + var char = permissions_1[_i]; switch (char) { case "r": blobSASPermissions.read = true; @@ -77660,19 +78984,19 @@ class BlobSASPermissions { blobSASPermissions.execute = true; break; default: - throw new RangeError(`Invalid permission: ${char}`); + throw new RangeError("Invalid permission: " + char); } } return blobSASPermissions; - } + }; /** * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it * and boolean values for them. * * @param permissionLike - */ - static from(permissionLike) { - const blobSASPermissions = new BlobSASPermissions(); + BlobSASPermissions.from = function (permissionLike) { + var blobSASPermissions = new BlobSASPermissions(); if (permissionLike.read) { blobSASPermissions.read = true; } @@ -77701,15 +79025,15 @@ class BlobSASPermissions { blobSASPermissions.execute = true; } return blobSASPermissions; - } + }; /** * Converts the given permissions to a string. Using this method will guarantee the permissions are in an * order accepted by the service. 
* * @returns A string which represents the BlobSASPermissions */ - toString() { - const permissions = []; + BlobSASPermissions.prototype.toString = function () { + var permissions = []; if (this.read) { permissions.push("r"); } @@ -77738,8 +79062,9 @@ class BlobSASPermissions { permissions.push("e"); } return permissions.join(""); - } -} + }; + return BlobSASPermissions; +}()); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -77750,8 +79075,8 @@ class BlobSASPermissions { * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but * the order of the permissions is particular and this class guarantees correctness. */ -class ContainerSASPermissions { - constructor() { +var ContainerSASPermissions = /** @class */ (function () { + function ContainerSASPermissions() { /** * Specifies Read access granted. */ @@ -77799,9 +79124,10 @@ class ContainerSASPermissions { * * @param permissions - */ - static parse(permissions) { - const containerSASPermissions = new ContainerSASPermissions(); - for (const char of permissions) { + ContainerSASPermissions.parse = function (permissions) { + var containerSASPermissions = new ContainerSASPermissions(); + for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) { + var char = permissions_1[_i]; switch (char) { case "r": containerSASPermissions.read = true; @@ -77834,19 +79160,19 @@ class ContainerSASPermissions { containerSASPermissions.execute = true; break; default: - throw new RangeError(`Invalid permission ${char}`); + throw new RangeError("Invalid permission " + char); } } return containerSASPermissions; - } + }; /** * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it * and boolean values for them. * * @param permissionLike - */ - static from(permissionLike) { - const containerSASPermissions = new ContainerSASPermissions(); + ContainerSASPermissions.from = function (permissionLike) { + var containerSASPermissions = new ContainerSASPermissions(); if (permissionLike.read) { containerSASPermissions.read = true; } @@ -77878,7 +79204,7 @@ class ContainerSASPermissions { containerSASPermissions.execute = true; } return containerSASPermissions; - } + }; /** * Converts the given permissions to a string. Using this method will guarantee the permissions are in an * order accepted by the service. @@ -77887,8 +79213,8 @@ class ContainerSASPermissions { * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * */ - toString() { - const permissions = []; + ContainerSASPermissions.prototype.toString = function () { + var permissions = []; if (this.read) { permissions.push("r"); } @@ -77920,8 +79246,9 @@ class ContainerSASPermissions { permissions.push("e"); } return permissions.join(""); - } -} + }; + return ContainerSASPermissions; +}()); // Copyright (c) Microsoft Corporation. /** @@ -77930,13 +79257,13 @@ class ContainerSASPermissions { * UserDelegationKeyCredential is only used for generation of user delegation SAS. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas */ -class UserDelegationKeyCredential { +var UserDelegationKeyCredential = /** @class */ (function () { /** * Creates an instance of UserDelegationKeyCredential. 
* @param accountName - * @param userDelegationKey - */ - constructor(accountName, userDelegationKey) { + function UserDelegationKeyCredential(accountName, userDelegationKey) { this.accountName = accountName; this.userDelegationKey = userDelegationKey; this.key = Buffer.from(userDelegationKey.value, "base64"); @@ -77946,13 +79273,14 @@ class UserDelegationKeyCredential { * * @param stringToSign - */ - computeHMACSHA256(stringToSign) { + UserDelegationKeyCredential.prototype.computeHMACSHA256 = function (stringToSign) { // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); return crypto.createHmac("sha256", this.key) .update(stringToSign, "utf8") .digest("base64"); - } -} + }; + return UserDelegationKeyCredential; +}()); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -77964,7 +79292,7 @@ class UserDelegationKeyCredential { * @param ipRange - */ function ipRangeToString(ipRange) { - return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; + return ipRange.end ? ipRange.start + "-" + ipRange.end : ipRange.start; } // Copyright (c) Microsoft Corporation. @@ -77987,8 +79315,8 @@ function ipRangeToString(ipRange) { * * NOTE: Instances of this class are immutable. */ -class SASQueryParameters { - constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId) { +var SASQueryParameters = /** @class */ (function () { + function SASQueryParameters(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId) { this.version = version; this.signature = signature; if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { @@ -78045,26 +79373,30 @@ class SASQueryParameters { } } } - /** - * Optional. IP range allowed for this SAS. - * - * @readonly - */ - get ipRange() { - if (this.ipRangeInner) { - return { - end: this.ipRangeInner.end, - start: this.ipRangeInner.start - }; - } - return undefined; - } + Object.defineProperty(SASQueryParameters.prototype, "ipRange", { + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get: function () { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start + }; + } + return undefined; + }, + enumerable: false, + configurable: true + }); /** * Encodes all SAS query parameters into a string that can be appended to a URL. * */ - toString() { - const params = [ + SASQueryParameters.prototype.toString = function () { + var params = [ "sv", "ss", "srt", @@ -78090,8 +79422,9 @@ class SASQueryParameters { "saoid", "scid" ]; - const queries = []; - for (const param of params) { + var queries = []; + for (var _i = 0, params_1 = params; _i < params_1.length; _i++) { + var param = params_1[_i]; switch (param) { case "sv": this.tryAppendQueryParameter(queries, param, this.version); @@ -78168,7 +79501,7 @@ class SASQueryParameters { } } return queries.join("&"); - } + }; /** * A private helper method used to filter and append query key/value pairs into an array. 
* @@ -78176,25 +79509,26 @@ class SASQueryParameters { * @param key - * @param value - */ - tryAppendQueryParameter(queries, key, value) { + SASQueryParameters.prototype.tryAppendQueryParameter = function (queries, key, value) { if (!value) { return; } key = encodeURIComponent(key); value = encodeURIComponent(value); if (key.length > 0 && value.length > 0) { - queries.push(`${key}=${value}`); + queries.push(key + "=" + value); } - } -} + }; + return SASQueryParameters; +}()); // Copyright (c) Microsoft Corporation. function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { - const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; - const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential + var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + var sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential ? sharedKeyCredentialOrUserDelegationKey : undefined; - let userDelegationKeyCredential; + var userDelegationKeyCredential; if (sharedKeyCredential === undefined && accountName !== undefined) { userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); } @@ -78250,12 +79584,12 @@ function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKe !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } - let resource = "c"; + var resource = "c"; if (blobSASSignatureValues.blobName) { resource = "b"; } // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; + var verifiedPermissions; if (blobSASSignatureValues.permissions) { if (blobSASSignatureValues.blobName) { verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); @@ -78265,7 +79599,7 @@ function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKe } } // Signature is generated on the un-url-encoded values. - const stringToSign = [ + var stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) @@ -78284,7 +79618,7 @@ function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKe blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "" ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + var signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); } /** @@ -78309,8 +79643,8 @@ function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKe !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; + var resource = "c"; + var timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) { @@ -78322,7 +79656,7 @@ function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKe } } // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; + var verifiedPermissions; if (blobSASSignatureValues.permissions) { if (blobSASSignatureValues.blobName) { verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); @@ -78332,7 +79666,7 @@ function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKe } } // Signature is generated on the un-url-encoded values. - const stringToSign = [ + var stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) @@ -78353,7 +79687,7 @@ function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKe blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "" ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + var signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); } /** @@ -78376,8 +79710,8 @@ function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userD if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; + var resource = "c"; + var timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) { @@ -78389,7 +79723,7 @@ function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userD } } // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; + var verifiedPermissions; if (blobSASSignatureValues.permissions) { if (blobSASSignatureValues.blobName) { verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); @@ -78399,7 +79733,7 @@ function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userD } } // Signature is generated on the un-url-encoded values. - const stringToSign = [ + var stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? 
truncatedISO8061Date(blobSASSignatureValues.startsOn, false) @@ -78429,7 +79763,7 @@ function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userD blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + var signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); } /** @@ -78452,8 +79786,8 @@ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userD if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; + var resource = "c"; + var timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) { @@ -78465,7 +79799,7 @@ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userD } } // Calling parse and toString guarantees the proper ordering and throws on invalid characters. - let verifiedPermissions; + var verifiedPermissions; if (blobSASSignatureValues.permissions) { if (blobSASSignatureValues.blobName) { verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); @@ -78475,7 +79809,7 @@ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userD } } // Signature is generated on the un-url-encoded values. - const stringToSign = [ + var stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? 
truncatedISO8061Date(blobSASSignatureValues.startsOn, false) @@ -78508,20 +79842,20 @@ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userD blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + var signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); } function getCanonicalName(accountName, containerName, blobName) { // Container: "/blob/account/containerName" // Blob: "/blob/account/containerName/blobName" - const elements = [`/blob/${accountName}/${containerName}`]; + var elements = ["/blob/" + accountName + "/" + containerName]; if (blobName) { - elements.push(`/${blobName}`); + elements.push("/" + blobName); } return elements.join(""); } function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { - const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); } @@ -78557,18 +79891,17 @@ function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { return blobSASSignatureValues; } -// Copyright (c) Microsoft Corporation. /** * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. */ -class BlobLeaseClient { +var BlobLeaseClient = /** @class */ (function () { /** * Creates an instance of BlobLeaseClient. * @param client - The client to make the lease operation requests. * @param leaseId - Initial proposed lease id. */ - constructor(client, leaseId) { - const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); + function BlobLeaseClient(client, leaseId) { + var clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); this._url = client.url; if (client.name === undefined) { this._isContainer = true; @@ -78583,22 +79916,30 @@ class BlobLeaseClient { } this._leaseId = leaseId; } - /** - * Gets the lease Id. - * - * @readonly - */ - get leaseId() { - return this._leaseId; - } - /** - * Gets the url. - * - * @readonly - */ - get url() { - return this._url; - } + Object.defineProperty(BlobLeaseClient.prototype, "leaseId", { + /** + * Gets the lease Id. + * + * @readonly + */ + get: function () { + return this._leaseId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobLeaseClient.prototype, "url", { + /** + * Gets the url. + * + * @readonly + */ + get: function () { + return this._url; + }, + enumerable: false, + configurable: true + }); /** * Establishes and manages a lock on a container for delete operations, or on a blob * for write and delete operations. 
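// Reviewer note (illustrative sketch, not part of the vendored bundle): the SAS hunks
// above still sign over the same canonical resource name; only the string building
// changed from template literals to concatenation. A stand-alone copy of the rewritten
// getCanonicalName logic for spot-checking (the account/container/blob values in the
// usage line are made up):
function getCanonicalNameSketch(accountName, containerName, blobName) {
    // Container: "/blob/<account>/<container>"
    // Blob:      "/blob/<account>/<container>/<blob>"
    var elements = ["/blob/" + accountName + "/" + containerName];
    if (blobName) {
        elements.push("/" + blobName);
    }
    return elements.join("");
}
// usage: getCanonicalNameSketch("myaccount", "mycontainer", "dir/file.txt")
//        -> "/blob/myaccount/mycontainer/dir/file.txt"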
@@ -78611,29 +79952,41 @@ class BlobLeaseClient { * @param options - option to configure lease management operations. * @returns Response data for acquire lease operation. */ - async acquireLease(duration, options = {}) { + BlobLeaseClient.prototype.acquireLease = function (duration, options) { var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _g, span, updatedOptions, e_1; + return tslib.__generator(this, function (_h) { + switch (_h.label) { + case 0: + _g = createSpan("BlobLeaseClient-acquireLease", options), span = _g.span, updatedOptions = _g.updatedOptions; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + _h.label = 1; + case 1: + _h.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this._containerOrBlobOperation.acquireLease(tslib.__assign({ abortSignal: options.abortSignal, duration: duration, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _h.sent()]; + case 3: + e_1 = _h.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_1.message + }); + throw e_1; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * To change the ID of the lease. 
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container @@ -78644,31 +79997,44 @@ class BlobLeaseClient { * @param options - option to configure lease management operations. * @returns Response data for change lease operation. */ - async changeLease(proposedLeaseId, options = {}) { + BlobLeaseClient.prototype.changeLease = function (proposedLeaseId, options) { var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - this._leaseId = proposedLeaseId; - return response; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _g, span, updatedOptions, response, e_2; + return tslib.__generator(this, function (_h) { + switch (_h.label) { + case 0: + _g = createSpan("BlobLeaseClient-changeLease", options), span = _g.span, updatedOptions = _g.updatedOptions; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + _h.label = 1; + case 1: + _h.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, tslib.__assign({ abortSignal: options.abortSignal, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _h.sent(); + this._leaseId = proposedLeaseId; + return [2 /*return*/, response]; + case 3: + e_2 = _h.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_2.message + }); + throw e_2; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * To free the lease if it is no longer needed so that another client may * immediately acquire a lease against the container or the blob. @@ -78679,29 +80045,41 @@ class BlobLeaseClient { * @param options - option to configure lease management operations. * @returns Response data for release lease operation. */ - async releaseLease(options = {}) { + BlobLeaseClient.prototype.releaseLease = function (options) { var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _g, span, updatedOptions, e_3; + return tslib.__generator(this, function (_h) { + switch (_h.label) { + case 0: + _g = createSpan("BlobLeaseClient-releaseLease", options), span = _g.span, updatedOptions = _g.updatedOptions; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + _h.label = 1; + case 1: + _h.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this._containerOrBlobOperation.releaseLease(this._leaseId, tslib.__assign({ abortSignal: options.abortSignal, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _h.sent()]; + case 3: + e_3 = _h.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_3.message + }); + throw e_3; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * To renew the lease. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container @@ -78711,29 +80089,41 @@ class BlobLeaseClient { * @param options - Optional option to configure lease management operations. * @returns Response data for renew lease operation. */ - async renewLease(options = {}) { + BlobLeaseClient.prototype.renewLease = function (options) { var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _g, span, updatedOptions, e_4; + return tslib.__generator(this, function (_h) { + switch (_h.label) { + case 0: + _g = createSpan("BlobLeaseClient-renewLease", options), span = _g.span, updatedOptions = _g.updatedOptions; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + _h.label = 1; + case 1: + _h.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this._containerOrBlobOperation.renewLease(this._leaseId, tslib.__assign({ abortSignal: options.abortSignal, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _h.sent()]; + case 3: + e_4 = _h.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_4.message + }); + throw e_4; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * To end the lease but ensure that another client cannot acquire a new lease * until the current lease period has expired. @@ -78745,31 +80135,44 @@ class BlobLeaseClient { * @param options - Optional options to configure lease management operations. * @returns Response data for break lease operation. */ - async breakLease(breakPeriod, options = {}) { + BlobLeaseClient.prototype.breakLease = function (breakPeriod, options) { var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); - if (this._isContainer && - ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || - (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || - ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); - return await this._containerOrBlobOperation.breakLease(operationOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _g, span, updatedOptions, operationOptions, e_5; + return tslib.__generator(this, function (_h) { + switch (_h.label) { + case 0: + _g = createSpan("BlobLeaseClient-breakLease", options), span = _g.span, updatedOptions = _g.updatedOptions; + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + _h.label = 1; + case 1: + _h.trys.push([1, 3, 4, 5]); + operationOptions = tslib.__assign({ abortSignal: options.abortSignal, breakPeriod: breakPeriod, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); + return [4 /*yield*/, this._containerOrBlobOperation.breakLease(operationOptions)]; + case 2: return [2 /*return*/, _h.sent()]; + case 3: + e_5 = _h.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_5.message + }); + throw e_5; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } -} + }); + }; + return BlobLeaseClient; +}()); // Copyright (c) Microsoft Corporation. /** @@ -78777,7 +80180,8 @@ class BlobLeaseClient { * * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. */ -class RetriableReadableStream extends stream.Readable { +var RetriableReadableStream = /** @class */ (function (_super) { + tslib.__extends(RetriableReadableStream, _super); /** * Creates an instance of RetriableReadableStream. * @@ -78788,31 +80192,32 @@ class RetriableReadableStream extends stream.Readable { * @param count - How much data in original data source to read * @param options - */ - constructor(source, getter, offset, count, options = {}) { - super({ highWaterMark: options.highWaterMark }); - this.retries = 0; - this.sourceDataHandler = (data) => { - if (this.options.doInjectErrorOnce) { - this.options.doInjectErrorOnce = undefined; - this.source.pause(); - this.source.removeAllListeners("data"); - this.source.emit("end"); + function RetriableReadableStream(source, getter, offset, count, options) { + if (options === void 0) { options = {}; } + var _this = _super.call(this, { highWaterMark: options.highWaterMark }) || this; + _this.retries = 0; + _this.sourceDataHandler = function (data) { + if (_this.options.doInjectErrorOnce) { + _this.options.doInjectErrorOnce = undefined; + _this.source.pause(); + _this.source.removeAllListeners("data"); + _this.source.emit("end"); return; } // console.log( // `Offset: ${this.offset}, Received ${data.length} from internal stream` // ); - this.offset += data.length; - if (this.onProgress) { - this.onProgress({ loadedBytes: this.offset - this.start }); + _this.offset += data.length; + if (_this.onProgress) { + _this.onProgress({ loadedBytes: _this.offset - _this.start }); } - if (!this.push(data)) { - this.source.pause(); + if (!_this.push(data)) { + _this.source.pause(); } }; - this.sourceErrorOrEndHandler = (err) => { + _this.sourceErrorOrEndHandler = function (err) { if (err && err.name === "AbortError") { - this.destroy(err); + _this.destroy(err); return; } // console.log( @@ -78820,67 +80225,69 @@ class RetriableReadableStream extends stream.Readable { // this.offset // }, dest end : ${this.end}` // ); - this.removeSourceEventHandlers(); - if (this.offset - 1 === this.end) { - this.push(null); + _this.removeSourceEventHandlers(); + if (_this.offset - 1 === _this.end) { + _this.push(null); } - else if (this.offset <= this.end) { + else if (_this.offset <= _this.end) { // console.log( // `retries: ${this.retries}, max retries: ${this.maxRetries}` // ); - if (this.retries < this.maxRetryRequests) { - this.retries += 1; - this.getter(this.offset) - .then((newSource) => { - this.source = newSource; - this.setSourceEventHandlers(); + if (_this.retries < _this.maxRetryRequests) { + _this.retries += 1; + _this.getter(_this.offset) + .then(function (newSource) { + _this.source = newSource; + _this.setSourceEventHandlers(); }) - .catch((error) => { - this.destroy(error); + .catch(function (error) { + _this.destroy(error); }); } else 
{ - this.destroy(new Error( + _this.destroy(new Error( // tslint:disable-next-line:max-line-length - `Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this - .offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + "Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: " + (_this + .offset - 1) + ", data needed offset: " + _this.end + ", retries: " + _this.retries + ", max retries: " + _this.maxRetryRequests)); } } else { - this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + _this.destroy(new Error("Data corruption failure: Received more data than original request, data needed offset is " + _this.end + ", received offset: " + (_this.offset - 1))); } }; - this.getter = getter; - this.source = source; - this.start = offset; - this.offset = offset; - this.end = offset + count - 1; - this.maxRetryRequests = + _this.getter = getter; + _this.source = source; + _this.start = offset; + _this.offset = offset; + _this.end = offset + count - 1; + _this.maxRetryRequests = options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; - this.onProgress = options.onProgress; - this.options = options; - this.setSourceEventHandlers(); + _this.onProgress = options.onProgress; + _this.options = options; + _this.setSourceEventHandlers(); + return _this; } - _read() { + RetriableReadableStream.prototype._read = function () { this.source.resume(); - } - setSourceEventHandlers() { + }; + RetriableReadableStream.prototype.setSourceEventHandlers = function () { this.source.on("data", this.sourceDataHandler); this.source.on("end", this.sourceErrorOrEndHandler); this.source.on("error", this.sourceErrorOrEndHandler); - } - removeSourceEventHandlers() { + }; + RetriableReadableStream.prototype.removeSourceEventHandlers = function () { this.source.removeListener("data", this.sourceDataHandler); this.source.removeListener("end", this.sourceErrorOrEndHandler); this.source.removeListener("error", this.sourceErrorOrEndHandler); - } - _destroy(error, callback) { + }; + RetriableReadableStream.prototype._destroy = function (error, callback) { // remove listener from source and release source this.removeSourceEventHandlers(); this.source.destroy(); callback(error === null ? undefined : error); - } -} + }; + return RetriableReadableStream; +}(stream.Readable)); // Copyright (c) Microsoft Corporation. /** @@ -78893,7 +80300,7 @@ class RetriableReadableStream extends stream.Readable { * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js * Readable stream. */ -class BlobDownloadResponse { +var BlobDownloadResponse = /** @class */ (function () { /** * Creates an instance of BlobDownloadResponse. * @@ -78903,1399 +80310,2076 @@ class BlobDownloadResponse { * @param count - * @param options - */ - constructor(originalResponse, getter, offset, count, options = {}) { + function BlobDownloadResponse(originalResponse, getter, offset, count, options) { + if (options === void 0) { options = {}; } this.originalResponse = originalResponse; this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); } - /** - * Indicates that the service supports - * requests for partial file content. 
- * - * @readonly - */ - get acceptRanges() { - return this.originalResponse.acceptRanges; - } - /** - * Returns if it was previously specified - * for the file. - * - * @readonly - */ - get cacheControl() { - return this.originalResponse.cacheControl; - } - /** - * Returns the value that was specified - * for the 'x-ms-content-disposition' header and specifies how to process the - * response. - * - * @readonly - */ - get contentDisposition() { - return this.originalResponse.contentDisposition; - } - /** - * Returns the value that was specified - * for the Content-Encoding request header. - * - * @readonly - */ - get contentEncoding() { - return this.originalResponse.contentEncoding; - } - /** - * Returns the value that was specified - * for the Content-Language request header. - * - * @readonly - */ - get contentLanguage() { - return this.originalResponse.contentLanguage; - } - /** - * The current sequence number for a - * page blob. This header is not returned for block blobs or append blobs. - * - * @readonly - */ - get blobSequenceNumber() { - return this.originalResponse.blobSequenceNumber; - } - /** - * The blob's type. Possible values include: - * 'BlockBlob', 'PageBlob', 'AppendBlob'. - * - * @readonly - */ - get blobType() { - return this.originalResponse.blobType; - } - /** - * The number of bytes present in the - * response body. - * - * @readonly - */ - get contentLength() { - return this.originalResponse.contentLength; - } - /** - * If the file has an MD5 hash and the - * request is to read the full file, this response header is returned so that - * the client can check for message content integrity. If the request is to - * read a specified range and the 'x-ms-range-get-content-md5' is set to - * true, then the request returns an MD5 hash for the range, as long as the - * range size is less than or equal to 4 MB. If neither of these sets of - * conditions is true, then no value is returned for the 'Content-MD5' - * header. - * - * @readonly - */ - get contentMD5() { - return this.originalResponse.contentMD5; - } - /** - * Indicates the range of bytes returned if - * the client requested a subset of the file by setting the Range request - * header. - * - * @readonly - */ - get contentRange() { - return this.originalResponse.contentRange; - } - /** - * The content type specified for the file. - * The default content type is 'application/octet-stream' - * - * @readonly - */ - get contentType() { - return this.originalResponse.contentType; - } - /** - * Conclusion time of the last attempted - * Copy File operation where this file was the destination file. This value - * can specify the time of a completed, aborted, or failed copy attempt. - * - * @readonly - */ - get copyCompletedOn() { - return this.originalResponse.copyCompletedOn; - } - /** - * String identifier for the last attempted Copy - * File operation where this file was the destination file. - * - * @readonly - */ - get copyId() { - return this.originalResponse.copyId; - } - /** - * Contains the number of bytes copied and - * the total bytes in the source in the last attempted Copy File operation - * where this file was the destination file. Can show between 0 and - * Content-Length bytes copied. - * - * @readonly - */ - get copyProgress() { - return this.originalResponse.copyProgress; - } - /** - * URL up to 2KB in length that specifies the - * source file used in the last attempted Copy File operation where this file - * was the destination file. 
- * - * @readonly - */ - get copySource() { - return this.originalResponse.copySource; - } - /** - * State of the copy operation - * identified by 'x-ms-copy-id'. Possible values include: 'pending', - * 'success', 'aborted', 'failed' - * - * @readonly - */ - get copyStatus() { - return this.originalResponse.copyStatus; - } - /** - * Only appears when - * x-ms-copy-status is failed or pending. Describes cause of fatal or - * non-fatal copy operation failure. - * - * @readonly - */ - get copyStatusDescription() { - return this.originalResponse.copyStatusDescription; - } - /** - * When a blob is leased, - * specifies whether the lease is of infinite or fixed duration. Possible - * values include: 'infinite', 'fixed'. - * - * @readonly - */ - get leaseDuration() { - return this.originalResponse.leaseDuration; - } - /** - * Lease state of the blob. Possible - * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. - * - * @readonly - */ - get leaseState() { - return this.originalResponse.leaseState; - } - /** - * The current lease status of the - * blob. Possible values include: 'locked', 'unlocked'. - * - * @readonly - */ - get leaseStatus() { - return this.originalResponse.leaseStatus; - } - /** - * A UTC date/time value generated by the service that - * indicates the time at which the response was initiated. - * - * @readonly - */ - get date() { - return this.originalResponse.date; - } - /** - * The number of committed blocks - * present in the blob. This header is returned only for append blobs. - * - * @readonly - */ - get blobCommittedBlockCount() { - return this.originalResponse.blobCommittedBlockCount; - } - /** - * The ETag contains a value that you can use to - * perform operations conditionally, in quotes. - * - * @readonly - */ - get etag() { - return this.originalResponse.etag; - } - /** - * The number of tags associated with the blob - * - * @readonly - */ - get tagCount() { - return this.originalResponse.tagCount; - } - /** - * The error code. - * - * @readonly - */ - get errorCode() { - return this.originalResponse.errorCode; - } - /** - * The value of this header is set to - * true if the file data and application metadata are completely encrypted - * using the specified algorithm. Otherwise, the value is set to false (when - * the file is unencrypted, or if only parts of the file/application metadata - * are encrypted). - * - * @readonly - */ - get isServerEncrypted() { - return this.originalResponse.isServerEncrypted; - } - /** - * If the blob has a MD5 hash, and if - * request contains range header (Range or x-ms-range), this response header - * is returned with the value of the whole blob's MD5 value. This value may - * or may not be equal to the value returned in Content-MD5 header, with the - * latter calculated from the requested range. - * - * @readonly - */ - get blobContentMD5() { - return this.originalResponse.blobContentMD5; - } - /** - * Returns the date and time the file was last - * modified. Any operation that modifies the file or its properties updates - * the last modified time. - * - * @readonly - */ - get lastModified() { - return this.originalResponse.lastModified; - } - /** - * Returns the UTC date and time generated by the service that indicates the time at which the blob was - * last read or written to. - * - * @readonly - */ - get lastAccessed() { - return this.originalResponse.lastAccessed; - } - /** - * A name-value pair - * to associate with a file storage object. 
- * - * @readonly - */ - get metadata() { - return this.originalResponse.metadata; - } - /** - * This header uniquely identifies the request - * that was made and can be used for troubleshooting the request. - * - * @readonly - */ - get requestId() { - return this.originalResponse.requestId; - } - /** - * If a client request id header is sent in the request, this header will be present in the - * response with the same value. - * - * @readonly - */ - get clientRequestId() { - return this.originalResponse.clientRequestId; - } - /** - * Indicates the version of the Blob service used - * to execute the request. - * - * @readonly - */ - get version() { - return this.originalResponse.version; - } - /** - * Indicates the versionId of the downloaded blob version. - * - * @readonly - */ - get versionId() { - return this.originalResponse.versionId; - } - /** - * Indicates whether version of this blob is a current version. - * - * @readonly - */ - get isCurrentVersion() { - return this.originalResponse.isCurrentVersion; - } - /** - * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned - * when the blob was encrypted with a customer-provided key. - * - * @readonly - */ - get encryptionKeySha256() { - return this.originalResponse.encryptionKeySha256; - } - /** - * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to - * true, then the request returns a crc64 for the range, as long as the range size is less than - * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is - * specified in the same request, it will fail with 400(Bad Request) - */ - get contentCrc64() { - return this.originalResponse.contentCrc64; - } - /** - * Object Replication Policy Id of the destination blob. - * - * @readonly - */ - get objectReplicationDestinationPolicyId() { - return this.originalResponse.objectReplicationDestinationPolicyId; - } - /** - * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. - * - * @readonly - */ - get objectReplicationSourceProperties() { - return this.originalResponse.objectReplicationSourceProperties; - } - /** - * If this blob has been sealed. - * - * @readonly - */ - get isSealed() { - return this.originalResponse.isSealed; - } - /** - * The response body as a browser Blob. - * Always undefined in node.js. - * - * @readonly - */ - get contentAsBlob() { - return this.originalResponse.blobBody; - } - /** - * The response body as a node.js Readable stream. - * Always undefined in the browser. - * - * It will automatically retry when internal read stream unexpected ends. - * - * @readonly - */ - get readableStreamBody() { - return coreHttp.isNode ? this.blobDownloadStream : undefined; - } - /** - * The HTTP response. - */ - get _response() { - return this.originalResponse._response; - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const AVRO_SYNC_MARKER_SIZE = 16; -const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); -const AVRO_CODEC_KEY = "avro.codec"; -const AVRO_SCHEMA_KEY = "avro.schema"; - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -function arraysEqual(a, b) { - if (a === b) - return true; - if (a == null || b == null) - return false; - if (a.length != b.length) - return false; - for (let i = 0; i < a.length; ++i) { - if (a[i] !== b[i]) - return false; - } - return true; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
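Every hunk in this dist/ bundle applies the same transformation: ES2017 build output (native class syntax and async/await) is replaced by ES5 output that leans on the tslib helpers (__extends, __awaiter, __generator, __assign). A minimal sketch of the async-method downleveling, assuming the tslib module is in scope as it is in this bundle; Greeter and greet are hypothetical names used only for illustration, not SDK members:

// The ES2017 form `class Greeter { async greet(name) { return "Hello, " + name; } }`
// comes out of the ES5 build roughly as:
var Greeter = /** @class */ (function () {
    function Greeter() {
    }
    Greeter.prototype.greet = function (name) {
        // tslib.__awaiter drives the state machine built by tslib.__generator as a Promise;
        // [2 /*return*/, value] is the generator opcode for `return value`.
        return tslib.__awaiter(this, void 0, void 0, function () {
            return tslib.__generator(this, function (_a) {
                return [2 /*return*/, "Hello, " + name];
            });
        });
    };
    return Greeter;
}());
// new Greeter().greet("world").then(function (msg) { console.log(msg); });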
-class AvroParser { - /** - * Reads a fixed number of bytes from the stream. - * - * @param stream - - * @param length - - * @param options - - */ - static async readFixedBytes(stream, length, options = {}) { - const bytes = await stream.read(length, { abortSignal: options.abortSignal }); - if (bytes.length != length) { - throw new Error("Hit stream end."); - } - return bytes; - } - /** - * Reads a single byte from the stream. - * - * @param stream - - * @param options - - */ - static async readByte(stream, options = {}) { - const buf = await AvroParser.readFixedBytes(stream, 1, options); - return buf[0]; - } - // int and long are stored in variable-length zig-zag coding. - // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt - // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types - static async readZigZagLong(stream, options = {}) { - let zigZagEncoded = 0; - let significanceInBit = 0; - let byte, haveMoreByte, significanceInFloat; - do { - byte = await AvroParser.readByte(stream, options); - haveMoreByte = byte & 0x80; - zigZagEncoded |= (byte & 0x7f) << significanceInBit; - significanceInBit += 7; - } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers - if (haveMoreByte) { - // Switch to float arithmetic - zigZagEncoded = zigZagEncoded; - significanceInFloat = 268435456; // 2 ** 28. - do { - byte = await AvroParser.readByte(stream, options); - zigZagEncoded += (byte & 0x7f) * significanceInFloat; - significanceInFloat *= 128; // 2 ** 7 - } while (byte & 0x80); - const res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2; - if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { - throw new Error("Integer overflow."); - } - return res; - } - return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); - } - static async readLong(stream, options = {}) { - return AvroParser.readZigZagLong(stream, options); - } - static async readInt(stream, options = {}) { - return AvroParser.readZigZagLong(stream, options); - } - static async readNull() { - return null; - } - static async readBoolean(stream, options = {}) { - const b = await AvroParser.readByte(stream, options); - if (b == 1) { - return true; - } - else if (b == 0) { - return false; - } - else { - throw new Error("Byte was not a boolean."); - } - } - static async readFloat(stream, options = {}) { - const u8arr = await AvroParser.readFixedBytes(stream, 4, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat32(0, true); // littleEndian = true - } - static async readDouble(stream, options = {}) { - const u8arr = await AvroParser.readFixedBytes(stream, 8, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat64(0, true); // littleEndian = true - } - static async readBytes(stream, options = {}) { - const size = await AvroParser.readLong(stream, options); - if (size < 0) { - throw new Error("Bytes size was negative."); - } - return await stream.read(size, { abortSignal: options.abortSignal }); - } - static async readString(stream, options = {}) { - const u8arr = await AvroParser.readBytes(stream, options); - // polyfill TextDecoder to be backward compatible with older - // nodejs that doesn't expose TextDecoder as a global variable - if (typeof TextDecoder === "undefined" && "function" !== "undefined") { - global.TextDecoder = __webpack_require__(669).TextDecoder; - } - // FUTURE: need TextDecoder polyfill 
for IE - const utf8decoder = new TextDecoder(); - return utf8decoder.decode(u8arr); - } - static async readMapPair(stream, readItemMethod, options = {}) { - const key = await AvroParser.readString(stream, options); - // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. - const value = await readItemMethod(stream, options); - return { key, value }; - } - static async readMap(stream, readItemMethod, options = {}) { - const readPairMethod = async (stream, options = {}) => { - return await AvroParser.readMapPair(stream, readItemMethod, options); - }; - const pairs = await AvroParser.readArray(stream, readPairMethod, options); - const dict = {}; - for (const pair of pairs) { - dict[pair.key] = pair.value; - } - return dict; - } - static async readArray(stream, readItemMethod, options = {}) { - const items = []; - for (let count = await AvroParser.readLong(stream, options); count != 0; count = await AvroParser.readLong(stream, options)) { - if (count < 0) { - // Ignore block sizes - await AvroParser.readLong(stream, options); - count = -count; - } - while (count--) { - const item = await readItemMethod(stream, options); - items.push(item); - } - } - return items; - } -} -var AvroComplex; -(function (AvroComplex) { - AvroComplex["RECORD"] = "record"; - AvroComplex["ENUM"] = "enum"; - AvroComplex["ARRAY"] = "array"; - AvroComplex["MAP"] = "map"; - AvroComplex["UNION"] = "union"; - AvroComplex["FIXED"] = "fixed"; -})(AvroComplex || (AvroComplex = {})); -class AvroType { - /** - * Determines the AvroType from the Avro Schema. - */ - static fromSchema(schema) { - if (typeof schema === "string") { - return AvroType.fromStringSchema(schema); - } - else if (Array.isArray(schema)) { - return AvroType.fromArraySchema(schema); - } - else { - return AvroType.fromObjectSchema(schema); - } - } - static fromStringSchema(schema) { - switch (schema) { - case AvroPrimitive.NULL: - case AvroPrimitive.BOOLEAN: - case AvroPrimitive.INT: - case AvroPrimitive.LONG: - case AvroPrimitive.FLOAT: - case AvroPrimitive.DOUBLE: - case AvroPrimitive.BYTES: - case AvroPrimitive.STRING: - return new AvroPrimitiveType(schema); - default: - throw new Error(`Unexpected Avro type ${schema}`); - } - } - static fromArraySchema(schema) { - return new AvroUnionType(schema.map(AvroType.fromSchema)); - } - static fromObjectSchema(schema) { - const type = schema.type; - // Primitives can be defined as strings or objects - try { - return AvroType.fromStringSchema(type); - } - catch (err) { } - switch (type) { - case AvroComplex.RECORD: - if (schema.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema}`); - } - if (!schema.name) { - throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); - } - const fields = {}; - if (!schema.fields) { - throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); - } - for (const field of schema.fields) { - fields[field.name] = AvroType.fromSchema(field.type); - } - return new AvroRecordType(fields, schema.name); - case AvroComplex.ENUM: - if (schema.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema}`); - } - if (!schema.symbols) { - throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); - } - return new AvroEnumType(schema.symbols); - case AvroComplex.MAP: - if (!schema.values) { - throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); - } - return new 
AvroMapType(AvroType.fromSchema(schema.values)); - case AvroComplex.ARRAY: // Unused today - case AvroComplex.FIXED: // Unused today - default: - throw new Error(`Unexpected Avro type ${type} in ${schema}`); - } - } -} -var AvroPrimitive; -(function (AvroPrimitive) { - AvroPrimitive["NULL"] = "null"; - AvroPrimitive["BOOLEAN"] = "boolean"; - AvroPrimitive["INT"] = "int"; - AvroPrimitive["LONG"] = "long"; - AvroPrimitive["FLOAT"] = "float"; - AvroPrimitive["DOUBLE"] = "double"; - AvroPrimitive["BYTES"] = "bytes"; - AvroPrimitive["STRING"] = "string"; -})(AvroPrimitive || (AvroPrimitive = {})); -class AvroPrimitiveType extends AvroType { - constructor(primitive) { - super(); - this._primitive = primitive; - } - async read(stream, options = {}) { - switch (this._primitive) { - case AvroPrimitive.NULL: - return await AvroParser.readNull(); - case AvroPrimitive.BOOLEAN: - return await AvroParser.readBoolean(stream, options); - case AvroPrimitive.INT: - return await AvroParser.readInt(stream, options); - case AvroPrimitive.LONG: - return await AvroParser.readLong(stream, options); - case AvroPrimitive.FLOAT: - return await AvroParser.readFloat(stream, options); - case AvroPrimitive.DOUBLE: - return await AvroParser.readDouble(stream, options); - case AvroPrimitive.BYTES: - return await AvroParser.readBytes(stream, options); - case AvroPrimitive.STRING: - return await AvroParser.readString(stream, options); - default: - throw new Error("Unknown Avro Primitive"); - } - } -} -class AvroEnumType extends AvroType { - constructor(symbols) { - super(); - this._symbols = symbols; - } - async read(stream, options = {}) { - const value = await AvroParser.readInt(stream, options); - return this._symbols[value]; - } -} -class AvroUnionType extends AvroType { - constructor(types) { - super(); - this._types = types; - } - async read(stream, options = {}) { - const typeIndex = await AvroParser.readInt(stream, options); - return await this._types[typeIndex].read(stream, options); - } -} -class AvroMapType extends AvroType { - constructor(itemType) { - super(); - this._itemType = itemType; - } - async read(stream, options = {}) { - const readItemMethod = async (s, options) => { - return await this._itemType.read(s, options); - }; - return await AvroParser.readMap(stream, readItemMethod, options); - } -} -class AvroRecordType extends AvroType { - constructor(fields, name) { - super(); - this._fields = fields; - this._name = name; - } - async read(stream, options = {}) { - const record = {}; - record["$schema"] = this._name; - for (const key in this._fields) { - if (this._fields.hasOwnProperty(key)) { - record[key] = await this._fields[key].read(stream, options); - } - } - return record; - } -} - -// Copyright (c) Microsoft Corporation. 
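The readZigZagLong routine above (and its downleveled counterpart later in this hunk) decodes Avro's variable-length, zig-zag-encoded integers. A self-contained sketch of that decoding for values on the 32-bit fast path (the SDK code switches to floating-point arithmetic for longer values); decodeZigZagVarint is a hypothetical helper name used only for illustration:

// Decodes one zig-zag varint from the front of a byte array.
// Avro stores int/long as base-128 varints (low 7 bits of data per byte, high bit = "more
// bytes follow"), with zig-zag mapping so small negatives stay short: 0,-1,1,-2,2 -> 0,1,2,3,4.
function decodeZigZagVarint(bytes) {
    var encoded = 0;
    var shift = 0;
    var index = 0;
    var byte;
    do {
        byte = bytes[index++];
        encoded |= (byte & 0x7f) << shift; // accumulate the 7 data bits
        shift += 7;
    } while (byte & 0x80); // high bit set means another byte follows
    return (encoded >>> 1) ^ -(encoded & 1); // undo the zig-zag mapping
}
// decodeZigZagVarint([0x04]) === 2 and decodeZigZagVarint([0x03]) === -2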
-class AvroReader { - constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { - this._dataStream = dataStream; - this._headerStream = headerStream || dataStream; - this._initialized = false; - this._blockOffset = currentBlockOffset || 0; - this._objectIndex = indexWithinCurrentBlock || 0; - this._initialBlockOffset = currentBlockOffset || 0; - } - get blockOffset() { - return this._blockOffset; - } - get objectIndex() { - return this._objectIndex; - } - async initialize(options = {}) { - const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { - abortSignal: options.abortSignal - }); - if (!arraysEqual(header, AVRO_INIT_BYTES)) { - throw new Error("Stream is not an Avro file."); - } - // File metadata is written as if defined by the following map schema: - // { "type": "map", "values": "bytes"} - this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { - abortSignal: options.abortSignal - }); - // Validate codec - const codec = this._metadata[AVRO_CODEC_KEY]; - if (!(codec == undefined || codec == "null")) { - throw new Error("Codecs are not supported"); - } - // The 16-byte, randomly-generated sync marker for this file. - this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal - }); - // Parse the schema - const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); - this._itemType = AvroType.fromSchema(schema); - if (this._blockOffset == 0) { - this._blockOffset = this._initialBlockOffset + this._dataStream.position; - } - this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal - }); - // skip block length - await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); - this._initialized = true; - if (this._objectIndex && this._objectIndex > 0) { - for (let i = 0; i < this._objectIndex; i++) { - await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); - this._itemsRemainingInBlock--; - } - } - } - hasNext() { - return !this._initialized || this._itemsRemainingInBlock > 0; - } - parseObjects(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* parseObjects_1() { - if (!this._initialized) { - yield tslib.__await(this.initialize(options)); - } - while (this.hasNext()) { - const result = yield tslib.__await(this._itemType.read(this._dataStream, { - abortSignal: options.abortSignal - })); - this._itemsRemainingInBlock--; - this._objectIndex++; - if (this._itemsRemainingInBlock == 0) { - const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal - })); - this._blockOffset = this._initialBlockOffset + this._dataStream.position; - this._objectIndex = 0; - if (!arraysEqual(this._syncMarker, marker)) { - throw new Error("Stream is not a valid Avro file."); - } - try { - this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal - })); - } - catch (err) { - // We hit the end of the stream. - this._itemsRemainingInBlock = 0; - } - if (this._itemsRemainingInBlock > 0) { - // Ignore block size - yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); - } - } - yield yield tslib.__await(result); - } - }); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-class AvroReadable { -} - -// Copyright (c) Microsoft Corporation. -const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); -class AvroReadableFromStream extends AvroReadable { - constructor(readable) { - super(); - this._readable = readable; - this._position = 0; - } - toUint8Array(data) { - if (typeof data === "string") { - return Buffer.from(data); - } - return data; - } - get position() { - return this._position; - } - async read(size, options = {}) { - var _a; - if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { - throw ABORT_ERROR; - } - if (size < 0) { - throw new Error(`size parameter should be positive: ${size}`); - } - if (size === 0) { - return new Uint8Array(); - } - if (!this._readable.readable) { - throw new Error("Stream no longer readable."); - } - // See if there is already enough data. - const chunk = this._readable.read(size); - if (chunk) { - this._position += chunk.length; - // chunk.length maybe less than desired size if the stream ends. - return this.toUint8Array(chunk); - } - else { - // register callback to wait for enough data to read - return new Promise((resolve, reject) => { - const cleanUp = () => { - this._readable.removeListener("readable", readableCallback); - this._readable.removeListener("error", rejectCallback); - this._readable.removeListener("end", rejectCallback); - this._readable.removeListener("close", rejectCallback); - if (options.abortSignal) { - options.abortSignal.removeEventListener("abort", abortHandler); - } - }; - const readableCallback = () => { - const chunk = this._readable.read(size); - if (chunk) { - this._position += chunk.length; - cleanUp(); - // chunk.length maybe less than desired size if the stream ends. - resolve(this.toUint8Array(chunk)); - } - }; - const rejectCallback = () => { - cleanUp(); - reject(); - }; - const abortHandler = () => { - cleanUp(); - reject(ABORT_ERROR); - }; - this._readable.on("readable", readableCallback); - this._readable.once("error", rejectCallback); - this._readable.once("end", rejectCallback); - this._readable.once("close", rejectCallback); - if (options.abortSignal) { - options.abortSignal.addEventListener("abort", abortHandler); - } - }); - } + Object.defineProperty(BlobDownloadResponse.prototype, "acceptRanges", { + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get: function () { + return this.originalResponse.acceptRanges; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "cacheControl", { + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get: function () { + return this.originalResponse.cacheControl; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentDisposition", { + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentDisposition; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentEncoding", { + /** + * Returns the value that was specified + * for the Content-Encoding request header. 
+ * + * @readonly + */ + get: function () { + return this.originalResponse.contentEncoding; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentLanguage", { + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentLanguage; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "blobSequenceNumber", { + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobSequenceNumber; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "blobType", { + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobType; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentLength", { + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentLength; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentMD5", { + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentMD5; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentRange", { + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentRange; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentType", { + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get: function () { + return this.originalResponse.contentType; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "copyCompletedOn", { + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get: function () { + return this.originalResponse.copyCompletedOn; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "copyId", { + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. 
+ * + * @readonly + */ + get: function () { + return this.originalResponse.copyId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "copyProgress", { + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get: function () { + return this.originalResponse.copyProgress; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "copySource", { + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get: function () { + return this.originalResponse.copySource; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "copyStatus", { + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get: function () { + return this.originalResponse.copyStatus; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "copyStatusDescription", { + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get: function () { + return this.originalResponse.copyStatusDescription; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "leaseDuration", { + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get: function () { + return this.originalResponse.leaseDuration; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "leaseState", { + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get: function () { + return this.originalResponse.leaseState; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "leaseStatus", { + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get: function () { + return this.originalResponse.leaseStatus; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "date", { + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get: function () { + return this.originalResponse.date; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "blobCommittedBlockCount", { + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobCommittedBlockCount; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "etag", { + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. 
+ * + * @readonly + */ + get: function () { + return this.originalResponse.etag; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "tagCount", { + /** + * The number of tags associated with the blob + * + * @readonly + */ + get: function () { + return this.originalResponse.tagCount; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "errorCode", { + /** + * The error code. + * + * @readonly + */ + get: function () { + return this.originalResponse.errorCode; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "isServerEncrypted", { + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get: function () { + return this.originalResponse.isServerEncrypted; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "blobContentMD5", { + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobContentMD5; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "lastModified", { + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get: function () { + return this.originalResponse.lastModified; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "lastAccessed", { + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. + * + * @readonly + */ + get: function () { + return this.originalResponse.lastAccessed; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "metadata", { + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get: function () { + return this.originalResponse.metadata; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "requestId", { + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get: function () { + return this.originalResponse.requestId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "clientRequestId", { + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get: function () { + return this.originalResponse.clientRequestId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "version", { + /** + * Indicates the version of the Blob service used + * to execute the request. 
+ * + * @readonly + */ + get: function () { + return this.originalResponse.version; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "versionId", { + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get: function () { + return this.originalResponse.versionId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "isCurrentVersion", { + /** + * Indicates whether version of this blob is a current version. + * + * @readonly + */ + get: function () { + return this.originalResponse.isCurrentVersion; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "encryptionKeySha256", { + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get: function () { + return this.originalResponse.encryptionKeySha256; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentCrc64", { + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get: function () { + return this.originalResponse.contentCrc64; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "objectReplicationDestinationPolicyId", { + /** + * Object Replication Policy Id of the destination blob. + * + * @readonly + */ + get: function () { + return this.originalResponse.objectReplicationDestinationPolicyId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "objectReplicationSourceProperties", { + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * + * @readonly + */ + get: function () { + return this.originalResponse.objectReplicationSourceProperties; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "isSealed", { + /** + * If this blob has been sealed. + * + * @readonly + */ + get: function () { + return this.originalResponse.isSealed; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "contentAsBlob", { + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobBody; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "readableStreamBody", { + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get: function () { + return coreHttp.isNode ? this.blobDownloadStream : undefined; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobDownloadResponse.prototype, "_response", { + /** + * The HTTP response. 
+ */ + get: function () { + return this.originalResponse._response; + }, + enumerable: false, + configurable: true + }); + return BlobDownloadResponse; +}()); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +var AVRO_SYNC_MARKER_SIZE = 16; +var AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); +var AVRO_CODEC_KEY = "avro.codec"; +var AVRO_SCHEMA_KEY = "avro.schema"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function arraysEqual(a, b) { + if (a === b) + return true; + if (a == null || b == null) + return false; + if (a.length != b.length) + return false; + for (var i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; } + return true; } // Copyright (c) Microsoft Corporation. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. - */ -class BlobQuickQueryStream extends stream.Readable { +var AvroParser = /** @class */ (function () { + function AvroParser() { + } /** - * Creates an instance of BlobQuickQueryStream. + * Reads a fixed number of bytes from the stream. * - * @param source - The current ReadableStream returned from getter + * @param stream - + * @param length - * @param options - */ - constructor(source, options = {}) { - super(); - this.avroPaused = true; - this.source = source; - this.onProgress = options.onProgress; - this.onError = options.onError; - this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); - this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); - } - _read() { - if (this.avroPaused) { - this.readInternal().catch((err) => { - this.emit("error", err); + AvroParser.readFixedBytes = function (stream, length, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var bytes; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, stream.read(length, { abortSignal: options.abortSignal })]; + case 1: + bytes = _a.sent(); + if (bytes.length != length) { + throw new Error("Hit stream end."); + } + return [2 /*return*/, bytes]; + } }); - } - } - async readInternal() { - this.avroPaused = false; - let avroNext; - do { - avroNext = await this.avroIter.next(); - if (avroNext.done) { - break; - } - const obj = avroNext.value; - const schema = obj.$schema; - if (typeof schema !== "string") { - throw Error("Missing schema in avro record."); - } - switch (schema) { - case "com.microsoft.azure.storage.queryBlobContents.resultData": - const data = obj.data; - if (data instanceof Uint8Array === false) { - throw Error("Invalid data in avro result record."); - } - if (!this.push(Buffer.from(data))) { - this.avroPaused = true; - } - break; - case "com.microsoft.azure.storage.queryBlobContents.progress": - const bytesScanned = obj.bytesScanned; - if (typeof bytesScanned !== "number") { - throw Error("Invalid bytesScanned in avro progress record."); - } - if (this.onProgress) { - this.onProgress({ loadedBytes: bytesScanned }); - } - break; - case "com.microsoft.azure.storage.queryBlobContents.end": - if (this.onProgress) { - const totalBytes = obj.totalBytes; - if (typeof totalBytes !== "number") { - throw Error("Invalid totalBytes in avro end record."); + }); + }; + /** + * Reads a single byte from the stream. 
+ * + * @param stream - + * @param options - + */ + AvroParser.readByte = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var buf; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 1, options)]; + case 1: + buf = _a.sent(); + return [2 /*return*/, buf[0]]; + } + }); + }); + }; + // int and long are stored in variable-length zig-zag coding. + // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + AvroParser.readZigZagLong = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var zigZagEncoded, significanceInBit, byte, haveMoreByte, significanceInFloat, res; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + zigZagEncoded = 0; + significanceInBit = 0; + _a.label = 1; + case 1: return [4 /*yield*/, AvroParser.readByte(stream, options)]; + case 2: + byte = _a.sent(); + haveMoreByte = byte & 0x80; + zigZagEncoded |= (byte & 0x7f) << significanceInBit; + significanceInBit += 7; + _a.label = 3; + case 3: + if (haveMoreByte && significanceInBit < 28) return [3 /*break*/, 1]; + _a.label = 4; + case 4: + if (!haveMoreByte) return [3 /*break*/, 9]; + // Switch to float arithmetic + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; // 2 ** 28. + _a.label = 5; + case 5: return [4 /*yield*/, AvroParser.readByte(stream, options)]; + case 6: + byte = _a.sent(); + zigZagEncoded += (byte & 0x7f) * significanceInFloat; + significanceInFloat *= 128; // 2 ** 7 + _a.label = 7; + case 7: + if (byte & 0x80) return [3 /*break*/, 5]; + _a.label = 8; + case 8: + res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); } - this.onProgress({ loadedBytes: totalBytes }); - } - this.push(null); - break; - case "com.microsoft.azure.storage.queryBlobContents.error": - if (this.onError) { - const fatal = obj.fatal; - if (typeof fatal !== "boolean") { - throw Error("Invalid fatal in avro error record."); + return [2 /*return*/, res]; + case 9: return [2 /*return*/, (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1)]; + } + }); + }); + }; + AvroParser.readLong = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, AvroParser.readZigZagLong(stream, options)]; + }); + }); + }; + AvroParser.readInt = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, AvroParser.readZigZagLong(stream, options)]; + }); + }); + }; + AvroParser.readNull = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, null]; + }); + }); + }; + AvroParser.readBoolean = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var b; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readByte(stream, options)]; + case 1: + b = _a.sent(); + if (b == 1) { + return [2 /*return*/, true]; } - const name = obj.name; - if (typeof name !== "string") { - throw Error("Invalid name in avro error record."); + else if (b == 0) { + return [2 /*return*/, false]; } - const description = obj.description; - if (typeof description !== "string") { - throw Error("Invalid description in avro error record."); + else { + throw new Error("Byte was not a boolean."); } - const position = obj.position; - if (typeof position !== "number") { - throw Error("Invalid position in avro error record."); + } + }); + }); + }; + AvroParser.readFloat = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var u8arr, view; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 4, options)]; + case 1: + u8arr = _a.sent(); + view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return [2 /*return*/, view.getFloat32(0, true)]; // littleEndian = true + } + }); + }); + }; + AvroParser.readDouble = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var u8arr, view; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 8, options)]; + case 1: + u8arr = _a.sent(); + view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return [2 /*return*/, view.getFloat64(0, true)]; // littleEndian = true + } + }); + }); + }; + AvroParser.readBytes = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var size; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: 
return [4 /*yield*/, AvroParser.readLong(stream, options)]; + case 1: + size = _a.sent(); + if (size < 0) { + throw new Error("Bytes size was negative."); } - this.onError({ - position, - name, - isFatal: fatal, - description - }); - } - break; - default: - throw Error(`Unknown schema ${schema} in avro progress record.`); - } - } while (!avroNext.done && !this.avroPaused); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will - * parse avor data returned by blob query. - */ -class BlobQueryResponse { - /** - * Creates an instance of BlobQueryResponse. - * - * @param originalResponse - - * @param options - - */ - constructor(originalResponse, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); - } - /** - * Indicates that the service supports - * requests for partial file content. - * - * @readonly - */ - get acceptRanges() { - return this.originalResponse.acceptRanges; - } - /** - * Returns if it was previously specified - * for the file. - * - * @readonly - */ - get cacheControl() { - return this.originalResponse.cacheControl; - } - /** - * Returns the value that was specified - * for the 'x-ms-content-disposition' header and specifies how to process the - * response. - * - * @readonly - */ - get contentDisposition() { - return this.originalResponse.contentDisposition; - } - /** - * Returns the value that was specified - * for the Content-Encoding request header. - * - * @readonly - */ - get contentEncoding() { - return this.originalResponse.contentEncoding; - } - /** - * Returns the value that was specified - * for the Content-Language request header. - * - * @readonly - */ - get contentLanguage() { - return this.originalResponse.contentLanguage; - } - /** - * The current sequence number for a - * page blob. This header is not returned for block blobs or append blobs. - * - * @readonly - */ - get blobSequenceNumber() { - return this.originalResponse.blobSequenceNumber; - } - /** - * The blob's type. Possible values include: - * 'BlockBlob', 'PageBlob', 'AppendBlob'. - * - * @readonly - */ - get blobType() { - return this.originalResponse.blobType; - } - /** - * The number of bytes present in the - * response body. - * - * @readonly - */ - get contentLength() { - return this.originalResponse.contentLength; - } - /** - * If the file has an MD5 hash and the - * request is to read the full file, this response header is returned so that - * the client can check for message content integrity. If the request is to - * read a specified range and the 'x-ms-range-get-content-md5' is set to - * true, then the request returns an MD5 hash for the range, as long as the - * range size is less than or equal to 4 MB. If neither of these sets of - * conditions is true, then no value is returned for the 'Content-MD5' - * header. - * - * @readonly - */ - get contentMD5() { - return this.originalResponse.contentMD5; - } - /** - * Indicates the range of bytes returned if - * the client requested a subset of the file by setting the Range request - * header. - * - * @readonly - */ - get contentRange() { - return this.originalResponse.contentRange; - } - /** - * The content type specified for the file. 
- * The default content type is 'application/octet-stream' - * - * @readonly - */ - get contentType() { - return this.originalResponse.contentType; - } - /** - * Conclusion time of the last attempted - * Copy File operation where this file was the destination file. This value - * can specify the time of a completed, aborted, or failed copy attempt. - * - * @readonly - */ - get copyCompletedOn() { - return undefined; - } - /** - * String identifier for the last attempted Copy - * File operation where this file was the destination file. - * - * @readonly - */ - get copyId() { - return this.originalResponse.copyId; - } - /** - * Contains the number of bytes copied and - * the total bytes in the source in the last attempted Copy File operation - * where this file was the destination file. Can show between 0 and - * Content-Length bytes copied. - * - * @readonly - */ - get copyProgress() { - return this.originalResponse.copyProgress; - } - /** - * URL up to 2KB in length that specifies the - * source file used in the last attempted Copy File operation where this file - * was the destination file. - * - * @readonly - */ - get copySource() { - return this.originalResponse.copySource; - } - /** - * State of the copy operation - * identified by 'x-ms-copy-id'. Possible values include: 'pending', - * 'success', 'aborted', 'failed' - * - * @readonly - */ - get copyStatus() { - return this.originalResponse.copyStatus; - } - /** - * Only appears when - * x-ms-copy-status is failed or pending. Describes cause of fatal or - * non-fatal copy operation failure. - * - * @readonly - */ - get copyStatusDescription() { - return this.originalResponse.copyStatusDescription; - } - /** - * When a blob is leased, - * specifies whether the lease is of infinite or fixed duration. Possible - * values include: 'infinite', 'fixed'. - * - * @readonly - */ - get leaseDuration() { - return this.originalResponse.leaseDuration; - } - /** - * Lease state of the blob. Possible - * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. - * - * @readonly - */ - get leaseState() { - return this.originalResponse.leaseState; - } - /** - * The current lease status of the - * blob. Possible values include: 'locked', 'unlocked'. - * - * @readonly - */ - get leaseStatus() { - return this.originalResponse.leaseStatus; - } - /** - * A UTC date/time value generated by the service that - * indicates the time at which the response was initiated. - * - * @readonly - */ - get date() { - return this.originalResponse.date; - } - /** - * The number of committed blocks - * present in the blob. This header is returned only for append blobs. - * - * @readonly - */ - get blobCommittedBlockCount() { - return this.originalResponse.blobCommittedBlockCount; - } - /** - * The ETag contains a value that you can use to - * perform operations conditionally, in quotes. - * - * @readonly - */ - get etag() { - return this.originalResponse.etag; - } - /** - * The error code. - * - * @readonly - */ - get errorCode() { - return this.originalResponse.errorCode; - } - /** - * The value of this header is set to - * true if the file data and application metadata are completely encrypted - * using the specified algorithm. Otherwise, the value is set to false (when - * the file is unencrypted, or if only parts of the file/application metadata - * are encrypted). 
- * - * @readonly - */ - get isServerEncrypted() { - return this.originalResponse.isServerEncrypted; + return [4 /*yield*/, stream.read(size, { abortSignal: options.abortSignal })]; + case 2: return [2 /*return*/, _a.sent()]; + } + }); + }); + }; + AvroParser.readString = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var u8arr, utf8decoder; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readBytes(stream, options)]; + case 1: + u8arr = _a.sent(); + // polyfill TextDecoder to be backward compatible with older + // nodejs that doesn't expose TextDecoder as a global variable + if (typeof TextDecoder === "undefined" && "function" !== "undefined") { + global.TextDecoder = __webpack_require__(669).TextDecoder; + } + utf8decoder = new TextDecoder(); + return [2 /*return*/, utf8decoder.decode(u8arr)]; + } + }); + }); + }; + AvroParser.readMapPair = function (stream, readItemMethod, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var key, value; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readString(stream, options)]; + case 1: + key = _a.sent(); + return [4 /*yield*/, readItemMethod(stream, options)]; + case 2: + value = _a.sent(); + return [2 /*return*/, { key: key, value: value }]; + } + }); + }); + }; + AvroParser.readMap = function (stream, readItemMethod, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var readPairMethod, pairs, dict, _i, pairs_1, pair; + var _this = this; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + readPairMethod = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(_this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readMapPair(stream, readItemMethod, options)]; + case 1: return [2 /*return*/, _a.sent()]; + } + }); + }); + }; + return [4 /*yield*/, AvroParser.readArray(stream, readPairMethod, options)]; + case 1: + pairs = _a.sent(); + dict = {}; + for (_i = 0, pairs_1 = pairs; _i < pairs_1.length; _i++) { + pair = pairs_1[_i]; + dict[pair.key] = pair.value; + } + return [2 /*return*/, dict]; + } + }); + }); + }; + AvroParser.readArray = function (stream, readItemMethod, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var items, count, item; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + items = []; + return [4 /*yield*/, AvroParser.readLong(stream, options)]; + case 1: + count = _a.sent(); + _a.label = 2; + case 2: + if (!(count != 0)) return [3 /*break*/, 8]; + if (!(count < 0)) return [3 /*break*/, 4]; + // Ignore block sizes + return [4 /*yield*/, AvroParser.readLong(stream, options)]; + case 3: + // Ignore block sizes + _a.sent(); + count = -count; + _a.label = 4; + case 4: + if (!count--) return [3 /*break*/, 6]; + return [4 /*yield*/, readItemMethod(stream, options)]; + case 5: + item = _a.sent(); + items.push(item); + return [3 /*break*/, 4]; + case 6: return [4 /*yield*/, AvroParser.readLong(stream, options)]; + case 7: + count = _a.sent(); + return [3 /*break*/, 2]; + case 8: return [2 /*return*/, items]; + } + }); + }); + 
}; + return AvroParser; +}()); +var AvroComplex; +(function (AvroComplex) { + AvroComplex["RECORD"] = "record"; + AvroComplex["ENUM"] = "enum"; + AvroComplex["ARRAY"] = "array"; + AvroComplex["MAP"] = "map"; + AvroComplex["UNION"] = "union"; + AvroComplex["FIXED"] = "fixed"; +})(AvroComplex || (AvroComplex = {})); +var AvroType = /** @class */ (function () { + function AvroType() { } /** - * If the blob has a MD5 hash, and if - * request contains range header (Range or x-ms-range), this response header - * is returned with the value of the whole blob's MD5 value. This value may - * or may not be equal to the value returned in Content-MD5 header, with the - * latter calculated from the requested range. - * - * @readonly + * Determines the AvroType from the Avro Schema. */ - get blobContentMD5() { - return this.originalResponse.blobContentMD5; + AvroType.fromSchema = function (schema) { + if (typeof schema === "string") { + return AvroType.fromStringSchema(schema); + } + else if (Array.isArray(schema)) { + return AvroType.fromArraySchema(schema); + } + else { + return AvroType.fromObjectSchema(schema); + } + }; + AvroType.fromStringSchema = function (schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error("Unexpected Avro type " + schema); + } + }; + AvroType.fromArraySchema = function (schema) { + return new AvroUnionType(schema.map(AvroType.fromSchema)); + }; + AvroType.fromObjectSchema = function (schema) { + var type = schema.type; + // Primitives can be defined as strings or objects + try { + return AvroType.fromStringSchema(type); + } + catch (err) { } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error("aliases currently is not supported, schema: " + schema); + } + if (!schema.name) { + throw new Error("Required attribute 'name' doesn't exist on schema: " + schema); + } + var fields = {}; + if (!schema.fields) { + throw new Error("Required attribute 'fields' doesn't exist on schema: " + schema); + } + for (var _i = 0, _a = schema.fields; _i < _a.length; _i++) { + var field = _a[_i]; + fields[field.name] = AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error("aliases currently is not supported, schema: " + schema); + } + if (!schema.symbols) { + throw new Error("Required attribute 'symbols' doesn't exist on schema: " + schema); + } + return new AvroEnumType(schema.symbols); + case AvroComplex.MAP: + if (!schema.values) { + throw new Error("Required attribute 'values' doesn't exist on schema: " + schema); + } + return new AvroMapType(AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: // Unused today + case AvroComplex.FIXED: // Unused today + default: + throw new Error("Unexpected Avro type " + type + " in " + schema); + } + }; + return AvroType; +}()); +var AvroPrimitive; +(function (AvroPrimitive) { + AvroPrimitive["NULL"] = "null"; + AvroPrimitive["BOOLEAN"] = "boolean"; + AvroPrimitive["INT"] = "int"; + AvroPrimitive["LONG"] = "long"; + AvroPrimitive["FLOAT"] = "float"; + AvroPrimitive["DOUBLE"] = "double"; + AvroPrimitive["BYTES"] = "bytes"; + AvroPrimitive["STRING"] = "string"; +})(AvroPrimitive || (AvroPrimitive = {})); +var AvroPrimitiveType = /** @class */ (function 
(_super) { + tslib.__extends(AvroPrimitiveType, _super); + function AvroPrimitiveType(primitive) { + var _this = _super.call(this) || this; + _this._primitive = primitive; + return _this; } - /** - * Returns the date and time the file was last - * modified. Any operation that modifies the file or its properties updates - * the last modified time. - * - * @readonly - */ - get lastModified() { - return this.originalResponse.lastModified; + AvroPrimitiveType.prototype.read = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = this._primitive; + switch (_a) { + case AvroPrimitive.NULL: return [3 /*break*/, 1]; + case AvroPrimitive.BOOLEAN: return [3 /*break*/, 3]; + case AvroPrimitive.INT: return [3 /*break*/, 5]; + case AvroPrimitive.LONG: return [3 /*break*/, 7]; + case AvroPrimitive.FLOAT: return [3 /*break*/, 9]; + case AvroPrimitive.DOUBLE: return [3 /*break*/, 11]; + case AvroPrimitive.BYTES: return [3 /*break*/, 13]; + case AvroPrimitive.STRING: return [3 /*break*/, 15]; + } + return [3 /*break*/, 17]; + case 1: return [4 /*yield*/, AvroParser.readNull()]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: return [4 /*yield*/, AvroParser.readBoolean(stream, options)]; + case 4: return [2 /*return*/, _b.sent()]; + case 5: return [4 /*yield*/, AvroParser.readInt(stream, options)]; + case 6: return [2 /*return*/, _b.sent()]; + case 7: return [4 /*yield*/, AvroParser.readLong(stream, options)]; + case 8: return [2 /*return*/, _b.sent()]; + case 9: return [4 /*yield*/, AvroParser.readFloat(stream, options)]; + case 10: return [2 /*return*/, _b.sent()]; + case 11: return [4 /*yield*/, AvroParser.readDouble(stream, options)]; + case 12: return [2 /*return*/, _b.sent()]; + case 13: return [4 /*yield*/, AvroParser.readBytes(stream, options)]; + case 14: return [2 /*return*/, _b.sent()]; + case 15: return [4 /*yield*/, AvroParser.readString(stream, options)]; + case 16: return [2 /*return*/, _b.sent()]; + case 17: throw new Error("Unknown Avro Primitive"); + } + }); + }); + }; + return AvroPrimitiveType; +}(AvroType)); +var AvroEnumType = /** @class */ (function (_super) { + tslib.__extends(AvroEnumType, _super); + function AvroEnumType(symbols) { + var _this = _super.call(this) || this; + _this._symbols = symbols; + return _this; } - /** - * A name-value pair - * to associate with a file storage object. - * - * @readonly - */ - get metadata() { - return this.originalResponse.metadata; + AvroEnumType.prototype.read = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var value; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readInt(stream, options)]; + case 1: + value = _a.sent(); + return [2 /*return*/, this._symbols[value]]; + } + }); + }); + }; + return AvroEnumType; +}(AvroType)); +var AvroUnionType = /** @class */ (function (_super) { + tslib.__extends(AvroUnionType, _super); + function AvroUnionType(types) { + var _this = _super.call(this) || this; + _this._types = types; + return _this; } - /** - * This header uniquely identifies the request - * that was made and can be used for troubleshooting the request. 
- * - * @readonly - */ - get requestId() { - return this.originalResponse.requestId; + AvroUnionType.prototype.read = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var typeIndex; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, AvroParser.readInt(stream, options)]; + case 1: + typeIndex = _a.sent(); + return [4 /*yield*/, this._types[typeIndex].read(stream, options)]; + case 2: return [2 /*return*/, _a.sent()]; + } + }); + }); + }; + return AvroUnionType; +}(AvroType)); +var AvroMapType = /** @class */ (function (_super) { + tslib.__extends(AvroMapType, _super); + function AvroMapType(itemType) { + var _this = _super.call(this) || this; + _this._itemType = itemType; + return _this; } - /** - * If a client request id header is sent in the request, this header will be present in the - * response with the same value. - * - * @readonly - */ - get clientRequestId() { - return this.originalResponse.clientRequestId; + AvroMapType.prototype.read = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var readItemMethod; + var _this = this; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + readItemMethod = function (s, options) { return tslib.__awaiter(_this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this._itemType.read(s, options)]; + case 1: return [2 /*return*/, _a.sent()]; + } + }); + }); }; + return [4 /*yield*/, AvroParser.readMap(stream, readItemMethod, options)]; + case 1: return [2 /*return*/, _a.sent()]; + } + }); + }); + }; + return AvroMapType; +}(AvroType)); +var AvroRecordType = /** @class */ (function (_super) { + tslib.__extends(AvroRecordType, _super); + function AvroRecordType(fields, name) { + var _this = _super.call(this) || this; + _this._fields = fields; + _this._name = name; + return _this; } - /** - * Indicates the version of the File service used - * to execute the request. - * - * @readonly - */ - get version() { - return this.originalResponse.version; + AvroRecordType.prototype.read = function (stream, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var record, _a, _b, _i, key, _c, _d; + return tslib.__generator(this, function (_e) { + switch (_e.label) { + case 0: + record = {}; + record["$schema"] = this._name; + _a = []; + for (_b in this._fields) + _a.push(_b); + _i = 0; + _e.label = 1; + case 1: + if (!(_i < _a.length)) return [3 /*break*/, 4]; + key = _a[_i]; + if (!this._fields.hasOwnProperty(key)) return [3 /*break*/, 3]; + _c = record; + _d = key; + return [4 /*yield*/, this._fields[key].read(stream, options)]; + case 2: + _c[_d] = _e.sent(); + _e.label = 3; + case 3: + _i++; + return [3 /*break*/, 1]; + case 4: return [2 /*return*/, record]; + } + }); + }); + }; + return AvroRecordType; +}(AvroType)); + +// Copyright (c) Microsoft Corporation. 
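Aside (illustrative sketch only, not part of this diff or of the bundled SDK): the comment on AvroParser.readZigZagLong earlier in this hunk points at the variable-length zig-zag coding Avro uses for int and long. Decoding one such value from an in-memory byte array looks roughly like the following. The helper name decodeZigZagVarint is hypothetical, and this simplified version only handles encodings that fit within 32-bit shifts; the bundled reader above switches to floating-point accumulation once the shift passes 28 bits.

// Hypothetical helper, for illustration only.
// Decodes one zig-zag varint starting at `offset` in `bytes`.
function decodeZigZagVarint(bytes, offset = 0) {
    let encoded = 0;
    let shift = 0;
    let pos = offset;
    let byte;
    do {
        byte = bytes[pos++];
        encoded |= (byte & 0x7f) << shift; // each byte carries 7 payload bits
        shift += 7;
    } while (byte & 0x80); // high bit set means another byte follows
    // zig-zag mapping: 0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, 4 -> 2, ...
    const value = (encoded >>> 1) ^ -(encoded & 1);
    return { value, nextOffset: pos };
}
// decodeZigZagVarint([0x04]).value === 2
// decodeZigZagVarint([0x03]).value === -2
// decodeZigZagVarint([0xac, 0x02]).value === 150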
+var AvroReader = /** @class */ (function () { + function AvroReader(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; } - /** - * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned - * when the blob was encrypted with a customer-provided key. - * - * @readonly - */ - get encryptionKeySha256() { - return this.originalResponse.encryptionKeySha256; + Object.defineProperty(AvroReader.prototype, "blockOffset", { + get: function () { + return this._blockOffset; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(AvroReader.prototype, "objectIndex", { + get: function () { + return this._objectIndex; + }, + enumerable: false, + configurable: true + }); + AvroReader.prototype.initialize = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var header, _a, codec, _b, schema, _c, i; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: return [4 /*yield*/, AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal + })]; + case 1: + header = _d.sent(); + if (!arraysEqual(header, AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); + } + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + _a = this; + return [4 /*yield*/, AvroParser.readMap(this._headerStream, AvroParser.readString, { + abortSignal: options.abortSignal + })]; + case 2: + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + _a._metadata = _d.sent(); + codec = this._metadata[AVRO_CODEC_KEY]; + if (!(codec == undefined || codec == "null")) { + throw new Error("Codecs are not supported"); + } + // The 16-byte, randomly-generated sync marker for this file. + _b = this; + return [4 /*yield*/, AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal + })]; + case 3: + // The 16-byte, randomly-generated sync marker for this file. 
+ _b._syncMarker = _d.sent(); + schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); + this._itemType = AvroType.fromSchema(schema); + if (this._blockOffset == 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + } + _c = this; + return [4 /*yield*/, AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal + })]; + case 4: + _c._itemsRemainingInBlock = _d.sent(); + // skip block length + return [4 /*yield*/, AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })]; + case 5: + // skip block length + _d.sent(); + this._initialized = true; + if (!(this._objectIndex && this._objectIndex > 0)) return [3 /*break*/, 9]; + i = 0; + _d.label = 6; + case 6: + if (!(i < this._objectIndex)) return [3 /*break*/, 9]; + return [4 /*yield*/, this._itemType.read(this._dataStream, { abortSignal: options.abortSignal })]; + case 7: + _d.sent(); + this._itemsRemainingInBlock--; + _d.label = 8; + case 8: + i++; + return [3 /*break*/, 6]; + case 9: return [2 /*return*/]; + } + }); + }); + }; + AvroReader.prototype.hasNext = function () { + return !this._initialized || this._itemsRemainingInBlock > 0; + }; + AvroReader.prototype.parseObjects = function (options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function parseObjects_1() { + var result, marker, _a, err_1; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!!this._initialized) return [3 /*break*/, 2]; + return [4 /*yield*/, tslib.__await(this.initialize(options))]; + case 1: + _b.sent(); + _b.label = 2; + case 2: + if (!this.hasNext()) return [3 /*break*/, 13]; + return [4 /*yield*/, tslib.__await(this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal + }))]; + case 3: + result = _b.sent(); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (!(this._itemsRemainingInBlock == 0)) return [3 /*break*/, 10]; + return [4 /*yield*/, tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal + }))]; + case 4: + marker = _b.sent(); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!arraysEqual(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + _b.label = 5; + case 5: + _b.trys.push([5, 7, , 8]); + _a = this; + return [4 /*yield*/, tslib.__await(AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal + }))]; + case 6: + _a._itemsRemainingInBlock = _b.sent(); + return [3 /*break*/, 8]; + case 7: + err_1 = _b.sent(); + // We hit the end of the stream. + this._itemsRemainingInBlock = 0; + return [3 /*break*/, 8]; + case 8: + if (!(this._itemsRemainingInBlock > 0)) return [3 /*break*/, 10]; + // Ignore block size + return [4 /*yield*/, tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }))]; + case 9: + // Ignore block size + _b.sent(); + _b.label = 10; + case 10: return [4 /*yield*/, tslib.__await(result)]; + case 11: return [4 /*yield*/, _b.sent()]; + case 12: + _b.sent(); + return [3 /*break*/, 2]; + case 13: return [2 /*return*/]; + } + }); + }); + }; + return AvroReader; +}()); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
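For orientation only (an illustrative sketch, not part of this diff): the AvroReader added above walks the standard Avro object container layout — the 4-byte magic "Obj\x01" (AVRO_INIT_BYTES), a metadata map carrying avro.schema and avro.codec, a 16-byte sync marker, then repeated blocks of object count, block size, the objects themselves, and another sync marker. A consumer such as the BlobQuickQueryStream defined further down in this hunk drives it roughly like this; nodeReadable is a hypothetical Node.js Readable supplying the Avro bytes.

// Illustrative usage sketch; assumes the AvroReader and AvroReadableFromStream
// classes defined in this bundle are in scope.
async function logAvroRecords(nodeReadable, abortSignal) {
    const reader = new AvroReader(new AvroReadableFromStream(nodeReadable));
    // parseObjects() reads the container header lazily on first iteration
    // (magic bytes, metadata map, sync marker), then yields one parsed
    // object per record in each data block.
    for await (const obj of reader.parseObjects({ abortSignal })) {
        // For record-typed schemas the parser attaches the record name as $schema.
        console.log(obj.$schema, obj);
    }
}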
+var AvroReadable = /** @class */ (function () { + function AvroReadable() { } - /** - * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to - * true, then the request returns a crc64 for the range, as long as the range size is less than - * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is - * specified in the same request, it will fail with 400(Bad Request) - */ - get contentCrc64() { - return this.originalResponse.contentCrc64; + return AvroReadable; +}()); + +// Copyright (c) Microsoft Corporation. +var ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); +var AvroReadableFromStream = /** @class */ (function (_super) { + tslib.__extends(AvroReadableFromStream, _super); + function AvroReadableFromStream(readable) { + var _this = _super.call(this) || this; + _this._readable = readable; + _this._position = 0; + return _this; } + AvroReadableFromStream.prototype.toUint8Array = function (data) { + if (typeof data === "string") { + return Buffer.from(data); + } + return data; + }; + Object.defineProperty(AvroReadableFromStream.prototype, "position", { + get: function () { + return this._position; + }, + enumerable: false, + configurable: true + }); + AvroReadableFromStream.prototype.read = function (size, options) { + var _a; + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var chunk; + var _this = this; + return tslib.__generator(this, function (_b) { + if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw ABORT_ERROR; + } + if (size < 0) { + throw new Error("size parameter should be positive: " + size); + } + if (size === 0) { + return [2 /*return*/, new Uint8Array()]; + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + // chunk.length maybe less than desired size if the stream ends. + return [2 /*return*/, this.toUint8Array(chunk)]; + } + else { + // register callback to wait for enough data to read + return [2 /*return*/, new Promise(function (resolve, reject) { + var cleanUp = function () { + _this._readable.removeListener("readable", readableCallback); + _this._readable.removeListener("error", rejectCallback); + _this._readable.removeListener("end", rejectCallback); + _this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + var readableCallback = function () { + var chunk = _this._readable.read(size); + if (chunk) { + _this._position += chunk.length; + cleanUp(); + // chunk.length maybe less than desired size if the stream ends. + resolve(_this.toUint8Array(chunk)); + } + }; + var rejectCallback = function () { + cleanUp(); + reject(); + }; + var abortHandler = function () { + cleanUp(); + reject(ABORT_ERROR); + }; + _this._readable.on("readable", readableCallback); + _this._readable.once("error", rejectCallback); + _this._readable.once("end", rejectCallback); + _this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + })]; + } + }); + }); + }; + return AvroReadableFromStream; +}(AvroReadable)); + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. 
+ * + * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. + */ +var BlobQuickQueryStream = /** @class */ (function (_super) { + tslib.__extends(BlobQuickQueryStream, _super); /** - * The response body as a browser Blob. - * Always undefined in node.js. + * Creates an instance of BlobQuickQueryStream. * - * @readonly + * @param source - The current ReadableStream returned from getter + * @param options - */ - get blobBody() { - return undefined; + function BlobQuickQueryStream(source, options) { + if (options === void 0) { options = {}; } + var _this = _super.call(this) || this; + _this.avroPaused = true; + _this.source = source; + _this.onProgress = options.onProgress; + _this.onError = options.onError; + _this.avroReader = new AvroReader(new AvroReadableFromStream(_this.source)); + _this.avroIter = _this.avroReader.parseObjects({ abortSignal: options.abortSignal }); + return _this; } + BlobQuickQueryStream.prototype._read = function () { + var _this = this; + if (this.avroPaused) { + this.readInternal().catch(function (err) { + _this.emit("error", err); + }); + } + }; + BlobQuickQueryStream.prototype.readInternal = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + var avroNext, obj, schema, data, bytesScanned, totalBytes, fatal, name_1, description, position; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + this.avroPaused = false; + _a.label = 1; + case 1: return [4 /*yield*/, this.avroIter.next()]; + case 2: + avroNext = _a.sent(); + if (avroNext.done) { + return [3 /*break*/, 4]; + } + obj = avroNext.value; + schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + name_1 = obj.name; + if (typeof name_1 !== "string") { + throw Error("Invalid name in avro error record."); + } + description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position: position, + name: name_1, + isFatal: fatal, + description: description + }); + } + break; + default: + throw Error("Unknown schema " + schema + " in avro progress record."); + } + _a.label = 3; + case 3: + if (!avroNext.done && !this.avroPaused) return [3 /*break*/, 1]; + _a.label = 4; + case 4: return [2 
/*return*/]; + } + }); + }); + }; + return BlobQuickQueryStream; +}(stream.Readable)); + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will + * parse avor data returned by blob query. + */ +var BlobQueryResponse = /** @class */ (function () { /** - * The response body as a node.js Readable stream. - * Always undefined in the browser. - * - * It will parse avor data returned by blob query. + * Creates an instance of BlobQueryResponse. * - * @readonly - */ - get readableStreamBody() { - return coreHttp.isNode ? this.blobDownloadStream : undefined; - } - /** - * The HTTP response. + * @param originalResponse - + * @param options - */ - get _response() { - return this.originalResponse._response; + function BlobQueryResponse(originalResponse, options) { + if (options === void 0) { options = {}; } + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); } -} + Object.defineProperty(BlobQueryResponse.prototype, "acceptRanges", { + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get: function () { + return this.originalResponse.acceptRanges; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "cacheControl", { + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get: function () { + return this.originalResponse.cacheControl; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentDisposition", { + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentDisposition; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentEncoding", { + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentEncoding; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentLanguage", { + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentLanguage; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "blobSequenceNumber", { + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobSequenceNumber; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "blobType", { + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobType; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentLength", { + /** + * The number of bytes present in the + * response body. 
+ * + * @readonly + */ + get: function () { + return this.originalResponse.contentLength; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentMD5", { + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentMD5; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentRange", { + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get: function () { + return this.originalResponse.contentRange; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentType", { + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get: function () { + return this.originalResponse.contentType; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "copyCompletedOn", { + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get: function () { + return undefined; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "copyId", { + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get: function () { + return this.originalResponse.copyId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "copyProgress", { + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get: function () { + return this.originalResponse.copyProgress; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "copySource", { + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get: function () { + return this.originalResponse.copySource; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "copyStatus", { + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. 
Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get: function () { + return this.originalResponse.copyStatus; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "copyStatusDescription", { + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get: function () { + return this.originalResponse.copyStatusDescription; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "leaseDuration", { + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get: function () { + return this.originalResponse.leaseDuration; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "leaseState", { + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get: function () { + return this.originalResponse.leaseState; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "leaseStatus", { + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get: function () { + return this.originalResponse.leaseStatus; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "date", { + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get: function () { + return this.originalResponse.date; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "blobCommittedBlockCount", { + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobCommittedBlockCount; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "etag", { + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get: function () { + return this.originalResponse.etag; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "errorCode", { + /** + * The error code. + * + * @readonly + */ + get: function () { + return this.originalResponse.errorCode; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "isServerEncrypted", { + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). 
+ * + * @readonly + */ + get: function () { + return this.originalResponse.isServerEncrypted; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "blobContentMD5", { + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get: function () { + return this.originalResponse.blobContentMD5; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "lastModified", { + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get: function () { + return this.originalResponse.lastModified; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "metadata", { + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get: function () { + return this.originalResponse.metadata; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "requestId", { + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get: function () { + return this.originalResponse.requestId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "clientRequestId", { + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get: function () { + return this.originalResponse.clientRequestId; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "version", { + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get: function () { + return this.originalResponse.version; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "encryptionKeySha256", { + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get: function () { + return this.originalResponse.encryptionKeySha256; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "contentCrc64", { + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get: function () { + return this.originalResponse.contentCrc64; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "blobBody", { + /** + * The response body as a browser Blob. + * Always undefined in node.js. 
+ * + * @readonly + */ + get: function () { + return undefined; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "readableStreamBody", { + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avor data returned by blob query. + * + * @readonly + */ + get: function () { + return coreHttp.isNode ? this.blobDownloadStream : undefined; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobQueryResponse.prototype, "_response", { + /** + * The HTTP response. + */ + get: function () { + return this.originalResponse._response; + }, + enumerable: false, + configurable: true + }); + return BlobQueryResponse; +}()); // Copyright (c) Microsoft Corporation. (function (BlockBlobTier) { @@ -80375,7 +82459,6 @@ function ensureCpkIfSpecified(cpk, isHttps) { } // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. /** * Function that converts PageRange and ClearRange to a common Range object. * PageRange and ClearRange have start and end while Range offset and count @@ -80383,18 +82466,18 @@ function ensureCpkIfSpecified(cpk, isHttps) { * @param response - Model PageBlob Range response */ function rangeResponseFromModel(response) { - const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + var pageRange = (response._response.parsedBody.pageRange || []).map(function (x) { return ({ offset: x.start, count: x.end - x.start - })); - const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + }); }); + var clearRange = (response._response.parsedBody.clearRange || []).map(function (x) { return ({ offset: x.start, count: x.end - x.start - })); - return Object.assign(Object.assign({}, response), { pageRange, - clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { - pageRange, - clearRange + }); }); + return tslib.__assign(tslib.__assign({}, response), { pageRange: pageRange, + clearRange: clearRange, _response: tslib.__assign(tslib.__assign({}, response._response), { parsedBody: { + pageRange: pageRange, + clearRange: clearRange } }) }); } @@ -80405,48 +82488,64 @@ function rangeResponseFromModel(response) { * * @hidden */ -class BlobBeginCopyFromUrlPoller extends coreLro.Poller { - constructor(options) { - const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions } = options; - let state; +var BlobBeginCopyFromUrlPoller = /** @class */ (function (_super) { + tslib.__extends(BlobBeginCopyFromUrlPoller, _super); + function BlobBeginCopyFromUrlPoller(options) { + var _this = this; + var blobClient = options.blobClient, copySource = options.copySource, _a = options.intervalInMs, intervalInMs = _a === void 0 ? 
15000 : _a, onProgress = options.onProgress, resumeFrom = options.resumeFrom, startCopyFromURLOptions = options.startCopyFromURLOptions; + var state; if (resumeFrom) { state = JSON.parse(resumeFrom).state; } - const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, - copySource, - startCopyFromURLOptions })); - super(operation); + var operation = makeBlobBeginCopyFromURLPollOperation(tslib.__assign(tslib.__assign({}, state), { blobClient: blobClient, + copySource: copySource, + startCopyFromURLOptions: startCopyFromURLOptions })); + _this = _super.call(this, operation) || this; if (typeof onProgress === "function") { - this.onProgress(onProgress); + _this.onProgress(onProgress); } - this.intervalInMs = intervalInMs; + _this.intervalInMs = intervalInMs; + return _this; } - delay() { + BlobBeginCopyFromUrlPoller.prototype.delay = function () { return coreHttp.delay(this.intervalInMs); - } -} + }; + return BlobBeginCopyFromUrlPoller; +}(coreLro.Poller)); /** * Note: Intentionally using function expression over arrow function expression * so that the function can be invoked with a different context. * This affects what `this` refers to. * @hidden */ -const cancel = async function cancel(options = {}) { - const state = this.state; - const { copyId } = state; - if (state.isCompleted) { - return makeBlobBeginCopyFromURLPollOperation(state); - } - if (!copyId) { - state.isCancelled = true; - return makeBlobBeginCopyFromURLPollOperation(state); - } - // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call - await state.blobClient.abortCopyFromURL(copyId, { - abortSignal: options.abortSignal +var cancel = function cancel(options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var state, copyId; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + state = this.state; + copyId = state.copyId; + if (state.isCompleted) { + return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)]; + } + if (!copyId) { + state.isCancelled = true; + return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)]; + } + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + return [4 /*yield*/, state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal + })]; + case 1: + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + _a.sent(); + state.isCancelled = true; + return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)]; + } + }); }); - state.isCancelled = true; - return makeBlobBeginCopyFromURLPollOperation(state); }; /** * Note: Intentionally using function expression over arrow function expression @@ -80454,48 +82553,64 @@ const cancel = async function cancel(options = {}) { * This affects what `this` refers to. 
* @hidden */ -const update = async function update(options = {}) { - const state = this.state; - const { blobClient, copySource, startCopyFromURLOptions } = state; - if (!state.isStarted) { - state.isStarted = true; - const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); - // copyId is needed to abort - state.copyId = result.copyId; - if (result.copyStatus === "success") { - state.result = result; - state.isCompleted = true; - } - } - else if (!state.isCompleted) { - try { - const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); - const { copyStatus, copyProgress } = result; - const prevCopyProgress = state.copyProgress; - if (copyProgress) { - state.copyProgress = copyProgress; - } - if (copyStatus === "pending" && - copyProgress !== prevCopyProgress && - typeof options.fireProgress === "function") { - // trigger in setTimeout, or swallow error? - options.fireProgress(state); - } - else if (copyStatus === "success") { - state.result = result; - state.isCompleted = true; - } - else if (copyStatus === "failed") { - state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); - state.isCompleted = true; +var update = function update(options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var state, blobClient, copySource, startCopyFromURLOptions, result, result, copyStatus, copyProgress, prevCopyProgress, err_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + state = this.state; + blobClient = state.blobClient, copySource = state.copySource, startCopyFromURLOptions = state.startCopyFromURLOptions; + if (!!state.isStarted) return [3 /*break*/, 2]; + state.isStarted = true; + return [4 /*yield*/, blobClient.startCopyFromURL(copySource, startCopyFromURLOptions)]; + case 1: + result = _a.sent(); + // copyId is needed to abort + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + return [3 /*break*/, 6]; + case 2: + if (!!state.isCompleted) return [3 /*break*/, 6]; + _a.label = 3; + case 3: + _a.trys.push([3, 5, , 6]); + return [4 /*yield*/, state.blobClient.getProperties({ abortSignal: options.abortSignal })]; + case 4: + result = _a.sent(); + copyStatus = result.copyStatus, copyProgress = result.copyProgress; + prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && + copyProgress !== prevCopyProgress && + typeof options.fireProgress === "function") { + // trigger in setTimeout, or swallow error? 
+ options.fireProgress(state); + } + else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + else if (copyStatus === "failed") { + state.error = new Error("Blob copy failed with reason: \"" + (result.copyStatusDescription || "unknown") + "\""); + state.isCompleted = true; + } + return [3 /*break*/, 6]; + case 5: + err_1 = _a.sent(); + state.error = err_1; + state.isCompleted = true; + return [3 /*break*/, 6]; + case 6: return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)]; } - } - catch (err) { - state.error = err; - state.isCompleted = true; - } - } - return makeBlobBeginCopyFromURLPollOperation(state); + }); + }); }; /** * Note: Intentionally using function expression over arrow function expression @@ -80503,8 +82618,8 @@ const update = async function update(options = {}) { * This affects what `this` refers to. * @hidden */ -const toString = function toString() { - return JSON.stringify({ state: this.state }, (key, value) => { +var toString = function toString() { + return JSON.stringify({ state: this.state }, function (key, value) { // remove blobClient from serialized state since a client can't be hydrated from this info. if (key === "blobClient") { return undefined; @@ -80518,10 +82633,10 @@ const toString = function toString() { */ function makeBlobBeginCopyFromURLPollOperation(state) { return { - state: Object.assign({}, state), - cancel, - toString, - update + state: tslib.__assign({}, state), + cancel: cancel, + toString: toString, + update: update }; } @@ -80536,14 +82651,14 @@ function makeBlobBeginCopyFromURLPollOperation(state) { */ function rangeToString(iRange) { if (iRange.offset < 0) { - throw new RangeError(`Range.offset cannot be smaller than 0.`); + throw new RangeError("Range.offset cannot be smaller than 0."); } if (iRange.count && iRange.count <= 0) { - throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + throw new RangeError("Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end."); } return iRange.count - ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` - : `bytes=${iRange.offset}-`; + ? "bytes=" + iRange.offset + "-" + (iRange.offset + iRange.count - 1) + : "bytes=" + iRange.offset + "-"; } // Copyright (c) Microsoft Corporation. @@ -80560,12 +82675,13 @@ var BatchStates; * Will stop execute left operations when one of the executed operation throws an error. * But Batch cannot cancel ongoing operations, you need to cancel them by yourself. */ -class Batch { +var Batch = /** @class */ (function () { /** * Creates an instance of Batch. * @param concurrency - */ - constructor(concurrency = 5) { + function Batch(concurrency) { + if (concurrency === void 0) { concurrency = 5; } /** * Number of active operations under execution. 
*/ @@ -80598,53 +82714,69 @@ class Batch { * * @param operation - */ - addOperation(operation) { - this.operations.push(async () => { - try { - this.actives++; - await operation(); - this.actives--; - this.completed++; - this.parallelExecute(); - } - catch (error) { - this.emitter.emit("error", error); - } - }); - } + Batch.prototype.addOperation = function (operation) { + var _this = this; + this.operations.push(function () { return tslib.__awaiter(_this, void 0, void 0, function () { + var error_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + _a.trys.push([0, 2, , 3]); + this.actives++; + return [4 /*yield*/, operation()]; + case 1: + _a.sent(); + this.actives--; + this.completed++; + this.parallelExecute(); + return [3 /*break*/, 3]; + case 2: + error_1 = _a.sent(); + this.emitter.emit("error", error_1); + return [3 /*break*/, 3]; + case 3: return [2 /*return*/]; + } + }); + }); }); + }; /** * Start execute operations in the queue. * */ - async do() { - if (this.operations.length === 0) { - return Promise.resolve(); - } - this.parallelExecute(); - return new Promise((resolve, reject) => { - this.emitter.on("finish", resolve); - this.emitter.on("error", (error) => { - this.state = BatchStates.Error; - reject(error); + Batch.prototype.do = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + if (this.operations.length === 0) { + return [2 /*return*/, Promise.resolve()]; + } + this.parallelExecute(); + return [2 /*return*/, new Promise(function (resolve, reject) { + _this.emitter.on("finish", resolve); + _this.emitter.on("error", function (error) { + _this.state = BatchStates.Error; + reject(error); + }); + })]; }); }); - } + }; /** * Get next operation to be executed. Return null when reaching ends. * */ - nextOperation() { + Batch.prototype.nextOperation = function () { if (this.offset < this.operations.length) { return this.operations[this.offset++]; } return null; - } + }; /** * Start execute operations. One one the most important difference between * this method with do() is that do() wraps as an sync method. * */ - parallelExecute() { + Batch.prototype.parallelExecute = function () { if (this.state === BatchStates.Error) { return; } @@ -80653,7 +82785,7 @@ class Batch { return; } while (this.actives < this.concurrency) { - const operation = this.nextOperation(); + var operation = this.nextOperation(); if (operation) { operation(); } @@ -80661,14 +82793,16 @@ class Batch { return; } } - } -} + }; + return Batch; +}()); // Copyright (c) Microsoft Corporation. /** * This class generates a readable stream from the data in an array of buffers. */ -class BuffersStream extends stream.Readable { +var BuffersStream = /** @class */ (function (_super) { + tslib.__extends(BuffersStream, _super); /** * Creates an instance of BuffersStream that will emit the data * contained in the array of buffers. 
@@ -80676,44 +82810,46 @@ class BuffersStream extends stream.Readable { * @param buffers - Array of buffers containing the data * @param byteLength - The total length of data contained in the buffers */ - constructor(buffers, byteLength, options) { - super(options); - this.buffers = buffers; - this.byteLength = byteLength; - this.byteOffsetInCurrentBuffer = 0; - this.bufferIndex = 0; - this.pushedBytesLength = 0; + function BuffersStream(buffers, byteLength, options) { + var _this = _super.call(this, options) || this; + _this.buffers = buffers; + _this.byteLength = byteLength; + _this.byteOffsetInCurrentBuffer = 0; + _this.bufferIndex = 0; + _this.pushedBytesLength = 0; // check byteLength is no larger than buffers[] total length - let buffersLength = 0; - for (const buf of this.buffers) { + var buffersLength = 0; + for (var _i = 0, _a = _this.buffers; _i < _a.length; _i++) { + var buf = _a[_i]; buffersLength += buf.byteLength; } - if (buffersLength < this.byteLength) { + if (buffersLength < _this.byteLength) { throw new Error("Data size shouldn't be larger than the total length of buffers."); } + return _this; } /** * Internal _read() that will be called when the stream wants to pull more data in. * * @param size - Optional. The size of data to be read */ - _read(size) { + BuffersStream.prototype._read = function (size) { if (this.pushedBytesLength >= this.byteLength) { this.push(null); } if (!size) { size = this.readableHighWaterMark; } - const outBuffers = []; - let i = 0; + var outBuffers = []; + var i = 0; while (i < size && this.pushedBytesLength < this.byteLength) { // The last buffer may be longer than the data it contains. - const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; - const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; - const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + var remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + var remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + var remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); if (remaining > size - i) { // chunkSize = size - i - const end = this.byteOffsetInCurrentBuffer + size - i; + var end = this.byteOffsetInCurrentBuffer + size - i; outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); this.pushedBytesLength += size - i; this.byteOffsetInCurrentBuffer = end; @@ -80722,7 +82858,7 @@ class BuffersStream extends stream.Readable { } else { // chunkSize = remaining - const end = this.byteOffsetInCurrentBuffer + remaining; + var end = this.byteOffsetInCurrentBuffer + remaining; outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); if (remaining === remainingCapacityInThisBuffer) { // this.buffers[this.bufferIndex] used up, shift to next one @@ -80742,15 +82878,16 @@ class BuffersStream extends stream.Readable { else if (outBuffers.length === 1) { this.push(outBuffers[0]); } - } -} + }; + return BuffersStream; +}(stream.Readable)); // Copyright (c) Microsoft Corporation. /** * maxBufferLength is max size of each buffer in the pooled buffers. */ // Can't use import as Typescript doesn't recognize "buffer". 
-const maxBufferLength = __webpack_require__(407).constants.MAX_LENGTH; +var maxBufferLength = __webpack_require__(407).constants.MAX_LENGTH; /** * This class provides a buffer container which conceptually has no hard size limit. * It accepts a capacity, an array of input buffers and the total length of input data. @@ -80759,8 +82896,8 @@ const maxBufferLength = __webpack_require__(407).constants.MAX_LENGTH; * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream * assembled from all the data in the internal "buffer". */ -class PooledBuffer { - constructor(capacity, buffers, totalLength) { +var PooledBuffer = /** @class */ (function () { + function PooledBuffer(capacity, buffers, totalLength) { /** * Internal buffers used to keep the data. * Each buffer has a length of the maxBufferLength except last one. @@ -80769,9 +82906,9 @@ class PooledBuffer { this.capacity = capacity; this._size = 0; // allocate - const bufferNum = Math.ceil(capacity / maxBufferLength); - for (let i = 0; i < bufferNum; i++) { - let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; + var bufferNum = Math.ceil(capacity / maxBufferLength); + for (var i = 0; i < bufferNum; i++) { + var len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; if (len === 0) { len = maxBufferLength; } @@ -80781,12 +82918,16 @@ class PooledBuffer { this.fill(buffers, totalLength); } } - /** - * The size of the data contained in the pooled buffers. - */ - get size() { - return this._size; - } + Object.defineProperty(PooledBuffer.prototype, "size", { + /** + * The size of the data contained in the pooled buffers. + */ + get: function () { + return this._size; + }, + enumerable: false, + configurable: true + }); /** * Fill the internal buffers with data in the input buffers serially * with respect to the total length and the total capacity of the internal buffers. @@ -80796,13 +82937,13 @@ class PooledBuffer { * @param totalLength - Total length of the data to be filled in. * */ - fill(buffers, totalLength) { + PooledBuffer.prototype.fill = function (buffers, totalLength) { this._size = Math.min(this.capacity, totalLength); - let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + var i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; while (totalCopiedNum < this._size) { - const source = buffers[i]; - const target = this.buffers[j]; - const copiedNum = source.copy(target, targetOffset, sourceOffset); + var source = buffers[i]; + var target = this.buffers[j]; + var copiedNum = source.copy(target, targetOffset, sourceOffset); totalCopiedNum += copiedNum; sourceOffset += copiedNum; targetOffset += copiedNum; @@ -80820,15 +82961,16 @@ class PooledBuffer { if (buffers.length > 0) { buffers[0] = buffers[0].slice(sourceOffset); } - } + }; /** * Get the readable stream assembled from all the data in the internal buffers. * */ - getReadableStream() { + PooledBuffer.prototype.getReadableStream = function () { return new BuffersStream(this.buffers, this.size); - } -} + }; + return PooledBuffer; +}()); // Copyright (c) Microsoft Corporation. /** @@ -80853,7 +82995,7 @@ class PooledBuffer { * in this situation, outgoing handlers are blocked. * Outgoing queue shouldn't be empty. */ -class BufferScheduler { +var BufferScheduler = /** @class */ (function () { /** * Creates an instance of BufferScheduler. 
* @@ -80866,7 +83008,7 @@ class BufferScheduler { * @param concurrency - Concurrency of executing outgoingHandlers (>0) * @param encoding - [Optional] Encoding of Readable stream when it's a string stream */ - constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + function BufferScheduler(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { /** * An internal event emitter. */ @@ -80912,13 +83054,13 @@ class BufferScheduler { */ this.outgoing = []; if (bufferSize <= 0) { - throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + throw new RangeError("bufferSize must be larger than 0, current is " + bufferSize); } if (maxBuffers <= 0) { - throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + throw new RangeError("maxBuffers must be larger than 0, current is " + maxBuffers); } if (concurrency <= 0) { - throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + throw new RangeError("concurrency must be larger than 0, current is " + concurrency); } this.bufferSize = bufferSize; this.maxBuffers = maxBuffers; @@ -80932,64 +83074,69 @@ class BufferScheduler { * returns error. * */ - async do() { - return new Promise((resolve, reject) => { - this.readable.on("data", (data) => { - data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; - this.appendUnresolvedData(data); - if (!this.resolveData()) { - this.readable.pause(); - } - }); - this.readable.on("error", (err) => { - this.emitter.emit("error", err); - }); - this.readable.on("end", () => { - this.isStreamEnd = true; - this.emitter.emit("checkEnd"); - }); - this.emitter.on("error", (err) => { - this.isError = true; - this.readable.pause(); - reject(err); - }); - this.emitter.on("checkEnd", () => { - if (this.outgoing.length > 0) { - this.triggerOutgoingHandlers(); - return; - } - if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { - if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { - const buffer = this.shiftBufferFromUnresolvedDataArray(); - this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) - .then(resolve) - .catch(reject); - } - else if (this.unresolvedLength >= this.bufferSize) { - return; - } - else { - resolve(); - } - } + BufferScheduler.prototype.do = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, new Promise(function (resolve, reject) { + _this.readable.on("data", function (data) { + data = typeof data === "string" ? 
Buffer.from(data, _this.encoding) : data; + _this.appendUnresolvedData(data); + if (!_this.resolveData()) { + _this.readable.pause(); + } + }); + _this.readable.on("error", function (err) { + _this.emitter.emit("error", err); + }); + _this.readable.on("end", function () { + _this.isStreamEnd = true; + _this.emitter.emit("checkEnd"); + }); + _this.emitter.on("error", function (err) { + _this.isError = true; + _this.readable.pause(); + reject(err); + }); + _this.emitter.on("checkEnd", function () { + if (_this.outgoing.length > 0) { + _this.triggerOutgoingHandlers(); + return; + } + if (_this.isStreamEnd && _this.executingOutgoingHandlers === 0) { + if (_this.unresolvedLength > 0 && _this.unresolvedLength < _this.bufferSize) { + var buffer_1 = _this.shiftBufferFromUnresolvedDataArray(); + _this.outgoingHandler(function () { return buffer_1.getReadableStream(); }, buffer_1.size, _this.offset) + .then(resolve) + .catch(reject); + } + else if (_this.unresolvedLength >= _this.bufferSize) { + return; + } + else { + resolve(); + } + } + }); + })]; }); }); - } + }; /** * Insert a new data into unresolved array. * * @param data - */ - appendUnresolvedData(data) { + BufferScheduler.prototype.appendUnresolvedData = function (data) { this.unresolvedDataArray.push(data); this.unresolvedLength += data.length; - } + }; /** * Try to shift a buffer with size in blockSize. The buffer returned may be less * than blockSize when data in unresolvedDataArray is less than bufferSize. * */ - shiftBufferFromUnresolvedDataArray(buffer) { + BufferScheduler.prototype.shiftBufferFromUnresolvedDataArray = function (buffer) { if (!buffer) { buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); } @@ -80998,7 +83145,7 @@ class BufferScheduler { } this.unresolvedLength -= buffer.size; return buffer; - } + }; /** * Resolve data in unresolvedDataArray. For every buffer with size in blockSize * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, @@ -81008,9 +83155,9 @@ class BufferScheduler { * * @returns Return false when buffers in incoming are not enough, else true. */ - resolveData() { + BufferScheduler.prototype.resolveData = function () { while (this.unresolvedLength >= this.bufferSize) { - let buffer; + var buffer = void 0; if (this.incoming.length > 0) { buffer = this.incoming.shift(); this.shiftBufferFromUnresolvedDataArray(buffer); @@ -81029,55 +83176,75 @@ class BufferScheduler { this.triggerOutgoingHandlers(); } return true; - } + }; /** * Try to trigger a outgoing handler for every buffer in outgoing. Stop when * concurrency reaches. */ - async triggerOutgoingHandlers() { - let buffer; - do { - if (this.executingOutgoingHandlers >= this.concurrency) { - return; - } - buffer = this.outgoing.shift(); - if (buffer) { - this.triggerOutgoingHandler(buffer); - } - } while (buffer); - } + BufferScheduler.prototype.triggerOutgoingHandlers = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + var buffer; + return tslib.__generator(this, function (_a) { + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return [2 /*return*/]; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + return [2 /*return*/]; + }); + }); + }; /** * Trigger a outgoing handler for a buffer shifted from outgoing. 
* * @param buffer - */ - async triggerOutgoingHandler(buffer) { - const bufferLength = buffer.size; - this.executingOutgoingHandlers++; - this.offset += bufferLength; - try { - await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); - } - catch (err) { - this.emitter.emit("error", err); - return; - } - this.executingOutgoingHandlers--; - this.reuseBuffer(buffer); - this.emitter.emit("checkEnd"); - } + BufferScheduler.prototype.triggerOutgoingHandler = function (buffer) { + return tslib.__awaiter(this, void 0, void 0, function () { + var bufferLength, err_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, this.outgoingHandler(function () { return buffer.getReadableStream(); }, bufferLength, this.offset - bufferLength)]; + case 2: + _a.sent(); + return [3 /*break*/, 4]; + case 3: + err_1 = _a.sent(); + this.emitter.emit("error", err_1); + return [2 /*return*/]; + case 4: + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + return [2 /*return*/]; + } + }); + }); + }; /** * Return buffer used by outgoing handler into incoming. * * @param buffer - */ - reuseBuffer(buffer) { + BufferScheduler.prototype.reuseBuffer = function (buffer) { this.incoming.push(buffer); if (!this.isError && this.resolveData() && !this.isStreamEnd) { this.readable.resume(); } - } -} + }; + return BufferScheduler; +}()); // Copyright (c) Microsoft Corporation. /** @@ -81089,34 +83256,39 @@ class BufferScheduler { * @param end - To which position in the buffer to be filled, exclusive * @param encoding - Encoding of the Readable stream */ -async function streamToBuffer(stream, buffer, offset, end, encoding) { - let pos = 0; // Position in stream - const count = end - offset; // Total amount of data needed in stream - return new Promise((resolve, reject) => { - stream.on("readable", () => { - if (pos >= count) { - resolve(); - return; - } - let chunk = stream.read(); - if (!chunk) { - return; - } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); - } - // How much data needed in this chunk - const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; - buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); - pos += chunkLength; - }); - stream.on("end", () => { - if (pos < count) { - reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); - } - resolve(); +function streamToBuffer(stream, buffer, offset, end, encoding) { + return tslib.__awaiter(this, void 0, void 0, function () { + var pos, count; + return tslib.__generator(this, function (_a) { + pos = 0; + count = end - offset; + return [2 /*return*/, new Promise(function (resolve, reject) { + stream.on("readable", function () { + if (pos >= count) { + resolve(); + return; + } + var chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + // How much data needed in this chunk + var chunkLength = pos + chunk.length > count ? 
count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream.on("end", function () { + if (pos < count) { + reject(new Error("Stream drains before getting enough data needed. Data read: " + pos + ", data need: " + count)); + } + resolve(); + }); + stream.on("error", reject); + })]; }); - stream.on("error", reject); }); } /** @@ -81128,29 +83300,34 @@ async function streamToBuffer(stream, buffer, offset, end, encoding) { * @returns with the count of bytes read. * @throws `RangeError` If buffer size is not big enough. */ -async function streamToBuffer2(stream, buffer, encoding) { - let pos = 0; // Position in stream - const bufferSize = buffer.length; - return new Promise((resolve, reject) => { - stream.on("readable", () => { - let chunk = stream.read(); - if (!chunk) { - return; - } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); - } - if (pos + chunk.length > bufferSize) { - reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); - return; - } - buffer.fill(chunk, pos, pos + chunk.length); - pos += chunk.length; - }); - stream.on("end", () => { - resolve(pos); +function streamToBuffer2(stream, buffer, encoding) { + return tslib.__awaiter(this, void 0, void 0, function () { + var pos, bufferSize; + return tslib.__generator(this, function (_a) { + pos = 0; + bufferSize = buffer.length; + return [2 /*return*/, new Promise(function (resolve, reject) { + stream.on("readable", function () { + var chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error("Stream exceeds buffer size. Buffer size: " + bufferSize)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream.on("end", function () { + resolve(pos); + }); + stream.on("error", reject); + })]; }); - stream.on("error", reject); }); } /** @@ -81161,17 +83338,21 @@ async function streamToBuffer2(stream, buffer, encoding) { * @param rs - The read stream. * @param file - Destination file path. */ -async function readStreamToLocalFile(rs, file) { - return new Promise((resolve, reject) => { - const ws = fs.createWriteStream(file); - rs.on("error", (err) => { - reject(err); - }); - ws.on("error", (err) => { - reject(err); +function readStreamToLocalFile(rs, file) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, new Promise(function (resolve, reject) { + var ws = fs.createWriteStream(file); + rs.on("error", function (err) { + reject(err); + }); + ws.on("error", function (err) { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + })]; }); - ws.on("close", resolve); - rs.pipe(ws); }); } /** @@ -81179,18 +83360,21 @@ async function readStreamToLocalFile(rs, file) { * * Promisified version of fs.stat(). */ -const fsStat = util.promisify(fs.stat); -const fsCreateReadStream = fs.createReadStream; +var fsStat = util.promisify(fs.stat); +var fsCreateReadStream = fs.createReadStream; /** * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, * append blob, or page blob. 
*/ -class BlobClient extends StorageClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { +var BlobClient = /** @class */ (function (_super) { + tslib.__extends(BlobClient, _super); + function BlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + var _a; + var _this = this; options = options || {}; - let pipeline; - let url; + var pipeline; + var url; if (credentialOrPipelineOrContainerName instanceof Pipeline) { // (url: string, pipeline: Pipeline) url = urlOrConnectionString; @@ -81216,12 +83400,12 @@ class BlobClient extends StorageClient { blobNameOrOptions && typeof blobNameOrOptions === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + var containerName = credentialOrPipelineOrContainerName; + var blobName = blobNameOrOptions; + var extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); @@ -81241,27 +83425,33 @@ class BlobClient extends StorageClient { else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url, pipeline); - ({ - blobName: this._name, - containerName: this._containerName - } = this.getBlobAndContainerNamesFromUrl()); - this.blobContext = new Blob$1(this.storageClientContext); - this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); - this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); - } - /** - * The name of the blob. - */ - get name() { - return this._name; - } - /** - * The name of the storage container the blob is associated with. - */ - get containerName() { - return this._containerName; + _this = _super.call(this, url, pipeline) || this; + (_a = _this.getBlobAndContainerNamesFromUrl(), _this._name = _a.blobName, _this._containerName = _a.containerName); + _this.blobContext = new Blob$1(_this.storageClientContext); + _this._snapshot = getURLParameter(_this.url, URLConstants.Parameters.SNAPSHOT); + _this._versionId = getURLParameter(_this.url, URLConstants.Parameters.VERSIONID); + return _this; } + Object.defineProperty(BlobClient.prototype, "name", { + /** + * The name of the blob. + */ + get: function () { + return this._name; + }, + enumerable: false, + configurable: true + }); + Object.defineProperty(BlobClient.prototype, "containerName", { + /** + * The name of the storage container the blob is associated with. + */ + get: function () { + return this._containerName; + }, + enumerable: false, + configurable: true + }); /** * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. * Provide "" will remove the snapshot and return a Client to the base blob. 
@@ -81269,9 +83459,9 @@ class BlobClient extends StorageClient { * @param snapshot - The snapshot timestamp. * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp */ - withSnapshot(snapshot) { + BlobClient.prototype.withSnapshot = function (snapshot) { return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); - } + }; /** * Creates a new BlobClient object pointing to a version of this blob. * Provide "" will remove the versionId and return a Client to the base blob. @@ -81279,30 +83469,30 @@ class BlobClient extends StorageClient { * @param versionId - The versionId. * @returns A new BlobClient object pointing to the version of this blob. */ - withVersion(versionId) { + BlobClient.prototype.withVersion = function (versionId) { return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); - } + }; /** * Creates a AppendBlobClient object. * */ - getAppendBlobClient() { + BlobClient.prototype.getAppendBlobClient = function () { return new AppendBlobClient(this.url, this.pipeline); - } + }; /** * Creates a BlockBlobClient object. * */ - getBlockBlobClient() { + BlobClient.prototype.getBlockBlobClient = function () { return new BlockBlobClient(this.url, this.pipeline); - } + }; /** * Creates a PageBlobClient object. * */ - getPageBlobClient() { + BlobClient.prototype.getPageBlobClient = function () { return new PageBlobClient(this.url, this.pipeline); - } + }; /** * Reads or downloads a blob from the system, including its metadata and properties. * You can also call Get Blob to read a snapshot. @@ -81362,77 +83552,98 @@ class BlobClient extends StorageClient { * } * ``` */ - async download(offset = 0, count, options = {}) { + BlobClient.prototype.download = function (offset, count, options) { var _a; - options.conditions = options.conditions || {}; - options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const { span, updatedOptions } = createSpan("BlobClient-download", options); - try { - const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress // for Node.js, progress is reported by RetriableReadableStream - }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); - // Return browser response immediately - if (false) {} - // We support retrying when download stream unexpected ends in Node.js runtime - // Following code shouldn't be bundled into browser build, however some - // bundlers may try to bundle following code and "FileReadResponse.ts". 
- // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" - // The config is in package.json "browser" field - if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { - // TODO: Default value or make it a required parameter? - options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; - } - if (res.contentLength === undefined) { - throw new RangeError(`File download response doesn't contain valid content length header`); - } - if (!res.etag) { - throw new RangeError(`File download response doesn't contain valid etag header`); - } - return new BlobDownloadResponse(wrappedRes, async (start) => { - var _a; - const updatedOptions = { - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ifMatch: options.conditions.ifMatch || res.etag, - ifModifiedSince: options.conditions.ifModifiedSince, - ifNoneMatch: options.conditions.ifNoneMatch, - ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, - ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions - }, - range: rangeToString({ - count: offset + res.contentLength - start, - offset: start - }), - rangeGetContentMD5: options.rangeGetContentMD5, - rangeGetContentCRC64: options.rangeGetContentCrc64, - snapshot: options.snapshot, - cpkInfo: options.customerProvidedKey - }; - // Debug purpose only - // console.log( - // `Read from internal stream, range: ${ - // updatedOptions.range - // }, options: ${JSON.stringify(updatedOptions)}` - // ); - return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedOptions))).readableStreamBody; - }, offset, res.contentLength, { - maxRetryRequests: options.maxRetryRequests, - onProgress: options.onProgress - }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (offset === void 0) { offset = 0; } + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, res_1, wrappedRes, e_1; + var _this = this; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + _b = createSpan("BlobClient-download", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.download(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress // for Node.js, progress is reported by RetriableReadableStream + }, range: offset === 0 && !count ? 
undefined : rangeToString({ offset: offset, count: count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + res_1 = _c.sent(); + wrappedRes = tslib.__assign(tslib.__assign({}, res_1), { _response: res_1._response, objectReplicationDestinationPolicyId: res_1.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res_1.objectReplicationRules) }); + // We support retrying when download stream unexpected ends in Node.js runtime + // Following code shouldn't be bundled into browser build, however some + // bundlers may try to bundle following code and "FileReadResponse.ts". + // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" + // The config is in package.json "browser" field + if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { + // TODO: Default value or make it a required parameter? + options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res_1.contentLength === undefined) { + throw new RangeError("File download response doesn't contain valid content length header"); + } + if (!res_1.etag) { + throw new RangeError("File download response doesn't contain valid etag header"); + } + return [2 /*return*/, new BlobDownloadResponse(wrappedRes, function (start) { return tslib.__awaiter(_this, void 0, void 0, function () { + var updatedOptions; + var _a; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + updatedOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res_1.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions + }, + range: rangeToString({ + count: offset + res_1.contentLength - start, + offset: start + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey + }; + return [4 /*yield*/, this.blobContext.download(tslib.__assign({ abortSignal: options.abortSignal }, updatedOptions))]; + case 1: + // Debug purpose only + // console.log( + // `Read from internal stream, range: ${ + // updatedOptions.range + // }, options: ${JSON.stringify(updatedOptions)}` + // ); + return [2 /*return*/, (_b.sent()).readableStreamBody]; + } + }); + }); }, offset, res_1.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress + })]; + case 3: + e_1 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_1.message + }); + throw e_1; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns true if the Azure blob resource represented by this client exists; false otherwise. * @@ -81442,36 +83653,49 @@ class BlobClient extends StorageClient { * * @param options - options to Exists operation. 
*/ - async exists(options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-exists", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - await this.getProperties({ - abortSignal: options.abortSignal, - customerProvidedKey: options.customerProvidedKey, - conditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions - }); - return true; - } - catch (e) { - if (e.statusCode === 404) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking blob existence" - }); - return false; - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobClient.prototype.exists = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_2; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobClient-exists", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions + })]; + case 2: + _b.sent(); + return [2 /*return*/, true]; + case 3: + e_2 = _b.sent(); + if (e_2.statusCode === 404) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when checking blob existence" + }); + return [2 /*return*/, false]; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_2.message + }); + throw e_2; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns all user-defined metadata, standard HTTP properties, and system properties * for the blob. It does not return the content of the blob. @@ -81484,26 +83708,39 @@ class BlobClient extends StorageClient { * * @param options - Optional options to Get Properties operation. */ - async getProperties(options = {}) { + BlobClient.prototype.getProperties = function (options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); - try { - options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); - return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, res, e_3; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-getProperties", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blobContext.getProperties(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + res = _c.sent(); + return [2 /*return*/, tslib.__assign(tslib.__assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) })]; + case 3: + e_3 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_3.message + }); + throw e_3; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Marks the specified blob or snapshot for deletion. The blob is later deleted * during garbage collection. Note that in order to delete a blob, you must delete @@ -81513,24 +83750,36 @@ class BlobClient extends StorageClient { * * @param options - Optional options to Blob Delete operation. */ - async delete(options = {}) { + BlobClient.prototype.delete = function (options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-delete", options); - options.conditions = options.conditions || {}; - try { - return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_4; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-delete", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.delete(tslib.__assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_4 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_4.message + }); + throw e_4; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted * during garbage collection. Note that in order to delete a blob, you must delete @@ -81540,32 +83789,45 @@ class BlobClient extends StorageClient { * * @param options - Optional options to Blob Delete operation. */ - async deleteIfExists(options = {}) { + BlobClient.prototype.deleteIfExists = function (options) { var _a, _b; - const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); - try { - const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a blob or snapshot only if it exists." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _c, span, updatedOptions, res, e_5; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _c = createSpan("BlobClient-deleteIfExists", options), span = _c.span, updatedOptions = _c.updatedOptions; + _d.label = 1; + case 1: + _d.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.delete(updatedOptions)]; + case 2: + res = _d.sent(); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable + })]; + case 3: + e_5 = _d.sent(); + if (((_a = e_5.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a blob or snapshot only if it exists." 
+ }); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_5.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_5.response })]; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_5.message + }); + throw e_5; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Restores the contents and metadata of soft deleted blob and any associated * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 @@ -81574,22 +83836,34 @@ class BlobClient extends StorageClient { * * @param options - Optional options to Blob Undelete operation. */ - async undelete(options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-undelete", options); - try { - return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobClient.prototype.undelete = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_6; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobClient-undelete", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.undelete(tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_6 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_6.message + }); + throw e_6; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets system properties on the blob. * @@ -81602,25 +83876,37 @@ class BlobClient extends StorageClient { * headers without a value will be cleared. * @param options - Optional options to Blob Set HTTP Headers operation. */ - async setHTTPHeaders(blobHTTPHeaders, options = {}) { + BlobClient.prototype.setHTTPHeaders = function (blobHTTPHeaders, options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_7; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-setHTTPHeaders", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blobContext.setHttpHeaders(tslib.__assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_7 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_7.message + }); + throw e_7; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets user-defined metadata for the specified blob as one or more name-value pairs. * @@ -81632,25 +83918,37 @@ class BlobClient extends StorageClient { * If no value provided the existing metadata will be removed. * @param options - Optional options to Set Metadata operation. */ - async setMetadata(metadata, options = {}) { + BlobClient.prototype.setMetadata = function (metadata, options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_8; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-setMetadata", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blobContext.setMetadata(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_8 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_8.message + }); + throw e_8; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets tags on the underlying blob. * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. @@ -81660,81 +83958,118 @@ class BlobClient extends StorageClient { * @param tags - * @param options - */ - async setTags(tags, options = {}) { + BlobClient.prototype.setTags = function (tags, options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-setTags", options); - try { - return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) })); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_9; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-setTags", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.setTags(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) }))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_9 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_9.message + }); + throw e_9; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Gets the tags associated with the underlying blob. * * @param options - */ - async getTags(options = {}) { + BlobClient.prototype.getTags = function (options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-getTags", options); - try { - const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); - return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, response, wrappedResponse, e_10; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-getTags", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.getTags(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _c.sent(); + wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + return [2 /*return*/, wrappedResponse]; + case 3: + e_10 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_10.message + }); + throw e_10; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Get a {@link BlobLeaseClient} that manages leases on the blob. * * @param proposeLeaseId - Initial proposed lease Id. * @returns A new BlobLeaseClient object for managing leases on the blob. */ - getBlobLeaseClient(proposeLeaseId) { + BlobClient.prototype.getBlobLeaseClient = function (proposeLeaseId) { return new BlobLeaseClient(this, proposeLeaseId); - } + }; /** * Creates a read-only snapshot of a blob. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob * * @param options - Optional options to the Blob Create Snapshot operation. */ - async createSnapshot(options = {}) { + BlobClient.prototype.createSnapshot = function (options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_11; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-createSnapshot", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blobContext.createSnapshot(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_11 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_11.message + }); + throw e_11; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Asynchronously copies a blob to a destination within the storage account. * This method returns a long running operation poller that allows you to wait @@ -81807,25 +84142,57 @@ class BlobClient extends StorageClient { * @param copySource - url to the source Azure Blob/File. * @param options - Optional options to the Blob Start Copy From URL operation. 
*/ - async beginCopyFromURL(copySource, options = {}) { - const client = { - abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), - getProperties: (...args) => this.getProperties(...args), - startCopyFromURL: (...args) => this.startCopyFromURL(...args) - }; - const poller = new BlobBeginCopyFromUrlPoller({ - blobClient: client, - copySource, - intervalInMs: options.intervalInMs, - onProgress: options.onProgress, - resumeFrom: options.resumeFrom, - startCopyFromURLOptions: options + BlobClient.prototype.beginCopyFromURL = function (copySource, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var client, poller; + var _this = this; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + client = { + abortCopyFromURL: function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return _this.abortCopyFromURL.apply(_this, args); + }, + getProperties: function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return _this.getProperties.apply(_this, args); + }, + startCopyFromURL: function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return _this.startCopyFromURL.apply(_this, args); + } + }; + poller = new BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource: copySource, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options + }); + // Trigger the startCopyFromURL call by calling poll. + // Any errors from this method should be surfaced to the user. + return [4 /*yield*/, poller.poll()]; + case 1: + // Trigger the startCopyFromURL call by calling poll. + // Any errors from this method should be surfaced to the user. + _a.sent(); + return [2 /*return*/, poller]; + } + }); }); - // Trigger the startCopyFromURL call by calling poll. - // Any errors from this method should be surfaced to the user. - await poller.poll(); - return poller; - } + }; /** * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero * length and full metadata. Version 2012-02-12 and newer. @@ -81834,22 +84201,34 @@ class BlobClient extends StorageClient { * @param copyId - Id of the Copy From URL operation. * @param options - Optional options to the Blob Abort Copy From URL operation. 
*/ - async abortCopyFromURL(copyId, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); - try { - return await this.blobContext.abortCopyFromURL(copyId, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobClient.prototype.abortCopyFromURL = function (copyId, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_12; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobClient-abortCopyFromURL", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.abortCopyFromURL(copyId, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_12 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_12.message + }); + throw e_12; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not * return a response until the copy is complete. @@ -81858,30 +84237,42 @@ class BlobClient extends StorageClient { * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication * @param options - */ - async syncCopyFromURL(copySource, options = {}) { + BlobClient.prototype.syncCopyFromURL = function (copySource, options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - try { - return await this.blobContext.copyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, sourceContentMD5: options.sourceContentMD5, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_13; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-syncCopyFromURL", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.copyFromURL(copySource, tslib.__assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + }, sourceContentMD5: options.sourceContentMD5, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_13 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_13.message + }); + throw e_13; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets the tier on a blob. The operation is allowed on a page blob in a premium * storage account and on a block blob in a blob storage account (locally redundant @@ -81893,119 +84284,159 @@ class BlobClient extends StorageClient { * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. * @param options - Optional options to the Blob Set Tier operation. */ - async setAccessTier(tier, options = {}) { + BlobClient.prototype.setAccessTier = function (tier, options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); - try { - return await this.blobContext.setTier(toAccessTier(tier), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } - finally { - span.end(); - } - } - async downloadToBuffer(param1, param2, param3, param4 = {}) { - let buffer; - let offset = 0; - let count = 0; - let options = param4; - if (param1 instanceof Buffer) { - buffer = param1; - offset = param2 || 0; - count = typeof param3 === "number" ? param3 : 0; - } - else { - offset = typeof param1 === "number" ? param1 : 0; - count = typeof param2 === "number" ? param2 : 0; - options = param3 || {}; - } - const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); - try { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0) { - throw new RangeError("blockSize option must be >= 0"); - } - if (options.blockSize === 0) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } - if (offset < 0) { - throw new RangeError("offset option must be >= 0"); - } - if (count && count <= 0) { - throw new RangeError("count option must be greater than 0"); - } - if (!options.conditions) { - options.conditions = {}; - } - // Customer doesn't specify length, get it - if (!count) { - const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - count = response.contentLength - offset; - if (count < 0) { - throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); - } - } - // Allocate the buffer of size = count if the buffer is not provided - if (!buffer) { - try { - buffer = Buffer.alloc(count); + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_14; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-setAccessTier", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.setTier(toAccessTier(tier), tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_14 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_14.message + }); + throw e_14; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; } - catch (error) { - throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). 
Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); + }); + }); + }; + BlobClient.prototype.downloadToBuffer = function (param1, param2, param3, param4) { + if (param4 === void 0) { param4 = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var buffer, offset, count, options, _a, span, updatedOptions, response, transferProgress_1, batch, _loop_1, off, e_15; + var _this = this; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + offset = 0; + count = 0; + options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? param3 : 0; + } + else { + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? param2 : 0; + options = param3 || {}; + } + _a = createSpan("BlobClient-downloadToBuffer", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 5, 6, 7]); + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); + } + if (options.blockSize === 0) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + if (!!count) return [3 /*break*/, 3]; + return [4 /*yield*/, this.getProperties(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }))]; + case 2: + response = _b.sent(); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError("offset " + offset + " shouldn't be larger than blob size " + response.contentLength); + } + _b.label = 3; + case 3: + // Allocate the buffer of size = count if the buffer is not provided + if (!buffer) { + try { + buffer = Buffer.alloc(count); + } + catch (error) { + throw new Error("Unable to allocate the buffer of size: " + count + "(in bytes). 
Please try passing your own buffer to the \"downloadToBuffer\" method or try using other methods like \"download\" or \"downloadToFile\".\t " + error.message); + } + } + if (buffer.length < count) { + throw new RangeError("The buffer's size should be equal to or larger than the request count of bytes: " + count); + } + transferProgress_1 = 0; + batch = new Batch(options.concurrency); + _loop_1 = function (off) { + batch.addOperation(function () { return tslib.__awaiter(_this, void 0, void 0, function () { + var chunkEnd, response, stream; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + chunkEnd = offset + count; + if (off + options.blockSize < chunkEnd) { + chunkEnd = off + options.blockSize; + } + return [4 /*yield*/, this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) + })]; + case 1: + response = _a.sent(); + stream = response.readableStreamBody; + return [4 /*yield*/, streamToBuffer(stream, buffer, off - offset, chunkEnd - offset)]; + case 2: + _a.sent(); + // Update progress after block is downloaded, in case of block trying + // Could provide finer grained progress updating inside HTTP requests, + // only if convenience layer download try is enabled + transferProgress_1 += chunkEnd - off; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress_1 }); + } + return [2 /*return*/]; + } + }); + }); }); + }; + for (off = offset; off < offset + count; off = off + options.blockSize) { + _loop_1(off); + } + return [4 /*yield*/, batch.do()]; + case 4: + _b.sent(); + return [2 /*return*/, buffer]; + case 5: + e_15 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_15.message + }); + throw e_15; + case 6: + span.end(); + return [7 /*endfinally*/]; + case 7: return [2 /*return*/]; } - } - if (buffer.length < count) { - throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); - } - let transferProgress = 0; - const batch = new Batch(options.concurrency); - for (let off = offset; off < offset + count; off = off + options.blockSize) { - batch.addOperation(async () => { - // Exclusive chunk end position - let chunkEnd = offset + count; - if (off + options.blockSize < chunkEnd) { - chunkEnd = off + options.blockSize; - } - const response = await this.download(off, chunkEnd - off, { - abortSignal: options.abortSignal, - conditions: options.conditions, - maxRetryRequests: options.maxRetryRequestsPerBlock, - customerProvidedKey: options.customerProvidedKey, - tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) - }); - const stream = response.readableStreamBody; - await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); - // Update progress after block is downloaded, in case of block trying - // Could provide finer grained progress updating inside HTTP requests, - // only if convenience layer download try is enabled - transferProgress += chunkEnd - off; - if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); - } - }); - } - await batch.do(); - return buffer; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message 
}); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -82022,31 +84453,48 @@ class BlobClient extends StorageClient { * content is already read and written into a local file * at the specified path. */ - async downloadToFile(filePath, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); - try { - const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - if (response.readableStreamBody) { - await readStreamToLocalFile(response.readableStreamBody, filePath); - } - // The stream is no longer accessible so setting it to undefined. - response.blobDownloadStream = undefined; - return response; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobClient.prototype.downloadToFile = function (filePath, offset, count, options) { + if (offset === void 0) { offset = 0; } + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, response, e_16; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobClient-downloadToFile", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 5, 6, 7]); + return [4 /*yield*/, this.download(offset, count, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }))]; + case 2: + response = _b.sent(); + if (!response.readableStreamBody) return [3 /*break*/, 4]; + return [4 /*yield*/, readStreamToLocalFile(response.readableStreamBody, filePath)]; + case 3: + _b.sent(); + _b.label = 4; + case 4: + // The stream is no longer accessible so setting it to undefined. + response.blobDownloadStream = undefined; + return [2 /*return*/, response]; + case 5: + e_16 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_16.message + }); + throw e_16; + case 6: + span.end(); + return [7 /*endfinally*/]; + case 7: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } - getBlobAndContainerNamesFromUrl() { - let containerName; - let blobName; + }); + }; + BlobClient.prototype.getBlobAndContainerNamesFromUrl = function () { + var containerName; + var blobName; try { // URL may look like the following // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; @@ -82055,11 +84503,11 @@ class BlobClient extends StorageClient { // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` // http://localhost:10001/devstoreaccount1/containername/blob - const parsedUrl = coreHttp.URLBuilder.parse(this.url); + var parsedUrl = coreHttp.URLBuilder.parse(this.url); if (parsedUrl.getHost().split(".")[1] === "blob") { // "https://myaccount.blob.core.windows.net/containername/blob". 
// .getPath() -> /containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + var pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); containerName = pathComponents[1]; blobName = pathComponents[3]; } @@ -82067,14 +84515,14 @@ class BlobClient extends StorageClient { // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob // .getPath() -> /devstoreaccount1/containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); + var pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); containerName = pathComponents[2]; blobName = pathComponents[4]; } else { // "https://customdomain.com/containername/blob". // .getPath() -> /containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + var pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); containerName = pathComponents[1]; blobName = pathComponents[3]; } @@ -82087,12 +84535,12 @@ class BlobClient extends StorageClient { if (!containerName) { throw new Error("Provided containerName is invalid."); } - return { blobName, containerName }; + return { blobName: blobName, containerName: containerName }; } catch (error) { throw new Error("Unable to extract blobName and containerName with provided information."); } - } + }; /** * Asynchronously copies a blob to a destination within the storage account. * In version 2012-02-12 and later, the source for a Copy Blob operation can be @@ -82106,31 +84554,43 @@ class BlobClient extends StorageClient { * @param copySource - url to the source Azure Blob/File. * @param options - Optional options to the Blob Start Copy From URL operation. */ - async startCopyFromURL(copySource, options = {}) { + BlobClient.prototype.startCopyFromURL = function (copySource, options) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - try { - return await this.blobContext.startCopyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - sourceIfTags: options.sourceConditions.tagConditions - }, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_17; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobClient-startCopyFromURL", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blobContext.startCopyFromURL(copySource, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions + }, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_17 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_17.message + }); + throw e_17; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Only available for BlobClient constructed with a shared key credential. * @@ -82142,25 +84602,29 @@ class BlobClient extends StorageClient { * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
*/ - generateSasUrl(options) { - return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + BlobClient.prototype.generateSasUrl = function (options) { + var _this = this; + return new Promise(function (resolve) { + if (!(_this.credential instanceof StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); + var sas = generateBlobSASQueryParameters(tslib.__assign({ containerName: _this._containerName, blobName: _this._name, snapshotTime: _this._snapshot, versionId: _this._versionId }, options), _this.credential).toString(); + resolve(appendToURLQuery(_this.url, sas)); }); - } -} + }; + return BlobClient; +}(StorageClient)); /** * AppendBlobClient defines a set of operations applicable to append blobs. */ -class AppendBlobClient extends BlobClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { +var AppendBlobClient = /** @class */ (function (_super) { + tslib.__extends(AppendBlobClient, _super); + function AppendBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + var _this = this; // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); - let pipeline; - let url; + var pipeline; + var url; options = options || {}; if (credentialOrPipelineOrContainerName instanceof Pipeline) { // (url: string, pipeline: Pipeline) @@ -82187,12 +84651,12 @@ class AppendBlobClient extends BlobClient { blobNameOrOptions && typeof blobNameOrOptions === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + var containerName = credentialOrPipelineOrContainerName; + var blobName = blobNameOrOptions; + var extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); @@ -82212,8 +84676,9 @@ class AppendBlobClient extends BlobClient { else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url, pipeline); - this.appendBlobContext = new AppendBlob(this.storageClientContext); + _this = _super.call(this, url, pipeline) || this; + _this.appendBlobContext = new AppendBlob(_this.storageClientContext); + return _this; } /** * Creates a new AppendBlobClient object identical to the source but with the @@ -82223,9 +84688,9 @@ class AppendBlobClient extends 
BlobClient { * @param snapshot - The snapshot timestamp. * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. */ - withSnapshot(snapshot) { + AppendBlobClient.prototype.withSnapshot = function (snapshot) { return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); - } + }; /** * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. * @see https://docs.microsoft.com/rest/api/storageservices/put-blob @@ -82240,25 +84705,37 @@ class AppendBlobClient extends BlobClient { * await appendBlobClient.create(); * ``` */ - async create(options = {}) { + AppendBlobClient.prototype.create = function (options) { var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_18; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("AppendBlobClient-create", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.appendBlobContext.create(0, tslib.__assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_18 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_18.message + }); + throw e_18; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. * If the blob with the same name already exists, the content of the existing blob will remain unchanged. 
@@ -82266,56 +84743,81 @@ class AppendBlobClient extends BlobClient { * * @param options - */ - async createIfNotExists(options = {}) { + AppendBlobClient.prototype.createIfNotExists = function (options) { var _a, _b; - const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); - const conditions = { ifNoneMatch: ETagAny }; - try { - const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _c, span, updatedOptions, conditions, res, e_19; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _c = createSpan("AppendBlobClient-createIfNotExists", options), span = _c.span, updatedOptions = _c.updatedOptions; + conditions = { ifNoneMatch: ETagAny }; + _d.label = 1; + case 1: + _d.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.create(tslib.__assign(tslib.__assign({}, updatedOptions), { conditions: conditions }))]; + case 2: + res = _d.sent(); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable + })]; + case 3: + e_19 = _d.sent(); + if (((_a = e_19.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist." + }); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_19.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_19.response })]; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_19.message + }); + throw e_19; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Seals the append blob, making it read only. * * @param options - */ - async seal(options = {}) { + AppendBlobClient.prototype.seal = function (options) { var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); - options.conditions = options.conditions || {}; - try { - return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_20; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("AppendBlobClient-seal", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.appendBlobContext.seal(tslib.__assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_20 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_20.message + }); + throw e_20; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Commits a new block of data to the end of the existing append blob. * @see https://docs.microsoft.com/rest/api/storageservices/append-block @@ -82340,27 +84842,39 @@ class AppendBlobClient extends BlobClient { * await existingAppendBlobClient.appendBlock(content, content.length); * ``` */ - async appendBlock(body, contentLength, options = {}) { + AppendBlobClient.prototype.appendBlock = function (body, contentLength, options) { var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_21; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("AppendBlobClient-appendBlock", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.appendBlobContext.appendBlock(contentLength, body, tslib.__assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_21 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_21.message + }); + throw e_21; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * The Append Block operation commits a new block of data to the end of an existing append blob * where the contents are read from a source url. @@ -82375,41 +84889,56 @@ class AppendBlobClient extends BlobClient { * @param count - Number of bytes to be appended as a block * @param options - */ - async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + AppendBlobClient.prototype.appendBlockFromURL = function (sourceURL, sourceOffset, count, options) { var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_22; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("AppendBlobClient-appendBlockFromURL", options), span = _b.span, updatedOptions = _b.updatedOptions; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, tslib.__assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count: count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_22 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_22.message + }); + throw e_22; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } -} + }); + }; + return AppendBlobClient; +}(BlobClient)); /** * BlockBlobClient defines a set of operations applicable to block blobs. */ -class BlockBlobClient extends BlobClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { +var BlockBlobClient = /** @class */ (function (_super) { + tslib.__extends(BlockBlobClient, _super); + function BlockBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + var _this = this; // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. 
// super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); - let pipeline; - let url; + var pipeline; + var url; options = options || {}; if (credentialOrPipelineOrContainerName instanceof Pipeline) { // (url: string, pipeline: Pipeline) @@ -82436,12 +84965,12 @@ class BlockBlobClient extends BlobClient { blobNameOrOptions && typeof blobNameOrOptions === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + var containerName = credentialOrPipelineOrContainerName; + var blobName = blobNameOrOptions; + var extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); @@ -82461,9 +84990,10 @@ class BlockBlobClient extends BlobClient { else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url, pipeline); - this.blockBlobContext = new BlockBlob(this.storageClientContext); - this._blobContext = new Blob$1(this.storageClientContext); + _this = _super.call(this, url, pipeline) || this; + _this.blockBlobContext = new BlockBlob(_this.storageClientContext); + _this._blobContext = new Blob$1(_this.storageClientContext); + return _this; } /** * Creates a new BlockBlobClient object identical to the source but with the @@ -82473,9 +85003,9 @@ class BlockBlobClient extends BlobClient { * @param snapshot - The snapshot timestamp. * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. */ - withSnapshot(snapshot) { + BlockBlobClient.prototype.withSnapshot = function (snapshot) { return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); - } + }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -82506,35 +85036,47 @@ class BlockBlobClient extends BlobClient { * @param query - * @param options - */ - async query(query, options = {}) { + BlockBlobClient.prototype.query = function (query, options) { var _a; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); - try { - if (false) {} - const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { - queryType: "SQL", - expression: query, - inputSerialization: toQuerySerialization(options.inputTextConfiguration), - outputSerialization: toQuerySerialization(options.outputTextConfiguration) - }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - return new BlobQueryResponse(response, { - abortSignal: options.abortSignal, - onProgress: options.onProgress, - onError: options.onError - }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, response, e_23; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + _b = createSpan("BlockBlobClient-query", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this._blobContext.query(tslib.__assign({ abortSignal: options.abortSignal, queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration) + }, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _c.sent(); + return [2 /*return*/, new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError + })]; + case 3: + e_23 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_23.message + }); + throw e_23; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Creates a new block blob, or updates the content of an existing block blob. * Updating an existing block blob overwrites any existing metadata on the blob. @@ -82562,27 +85104,39 @@ class BlockBlobClient extends BlobClient { * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ - async upload(body, contentLength, options = {}) { + BlockBlobClient.prototype.upload = function (body, contentLength, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_24; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("BlockBlobClient-upload", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blockBlobContext.upload(contentLength, body, tslib.__assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_24 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_24.message + }); + throw e_24; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Creates a new Block Blob where the contents of the blob are read from a given URL. * This API is supported beginning with the 2020-04-08 version. Partial updates @@ -82601,31 +85155,43 @@ class BlockBlobClient extends BlobClient { * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= * @param options - Optional parameters. */ - async syncUploadFromURL(sourceURL, options = {}) { + BlockBlobClient.prototype.syncUploadFromURL = function (sourceURL, options) { var _a, _b, _c, _d, _e; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, - sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, - sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, - sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, - sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions - }, cpkInfo: options.customerProvidedKey, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _f, span, updatedOptions, e_25; + return tslib.__generator(this, function (_g) { + switch (_g.label) { + case 0: + options.conditions = options.conditions || {}; + _f = createSpan("BlockBlobClient-syncUploadFromURL", options), span = _f.span, updatedOptions = _f.updatedOptions; + _g.label = 1; + case 1: + _g.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blockBlobContext.putBlobFromUrl(0, sourceURL, tslib.__assign(tslib.__assign(tslib.__assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, + sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, + sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, + sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.tagConditions + }, cpkInfo: options.customerProvidedKey, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }), convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _g.sent()]; + case 3: + e_25 = _g.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_25.message + }); + throw e_25; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Uploads the specified block to the block blob's "staging area" to be later * committed by a call to commitBlockList. @@ -82637,25 +85203,37 @@ class BlockBlobClient extends BlobClient { * @param options - Options to the Block Blob Stage Block operation. * @returns Response data for the Block Blob Stage Block operation. 
*/ - async stageBlock(blockId, body, contentLength, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { - onUploadProgress: options.onProgress - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlockBlobClient.prototype.stageBlock = function (blockId, body, contentLength, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_26; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlockBlobClient-stageBlock", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blockBlobContext.stageBlock(blockId, contentLength, body, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { + onUploadProgress: options.onProgress + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_26 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_26.message + }); + throw e_26; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * The Stage Block From URL operation creates a new block to be committed as part * of a blob where the contents are read from a URL. @@ -82677,23 +85255,36 @@ class BlockBlobClient extends BlobClient { * @param options - Options to the Block Blob Stage Block From URL operation. * @returns Response data for the Block Blob Stage Block From URL operation. */ - async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? 
undefined : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlockBlobClient.prototype.stageBlockFromURL = function (blockId, sourceURL, offset, count, options) { + if (offset === void 0) { offset = 0; } + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_27; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlockBlobClient-stageBlockFromURL", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset: offset, count: count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_27 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_27.message + }); + throw e_27; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Writes a blob by specifying the list of block IDs that make up the blob. * In order to be written as part of a blob, a block must have been successfully written @@ -82706,25 +85297,37 @@ class BlockBlobClient extends BlobClient { * @param options - Options to the Block Blob Commit Block List operation. * @returns Response data for the Block Blob Commit Block List operation. */ - async commitBlockList(blocks, options = {}) { + BlockBlobClient.prototype.commitBlockList = function (blocks, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.commitBlockList({ latest: blocks }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_28; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("BlockBlobClient-commitBlockList", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.blockBlobContext.commitBlockList({ latest: blocks }, tslib.__assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_28 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_28.message + }); + throw e_28; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns the list of blocks that have been uploaded as part of a block blob * using the specified block list filter. @@ -82735,30 +85338,43 @@ class BlockBlobClient extends BlobClient { * @param options - Options to the Block Blob Get Block List operation. * @returns Response data for the Block Blob Get Block List operation. */ - async getBlockList(listType, options = {}) { + BlockBlobClient.prototype.getBlockList = function (listType, options) { var _a; - const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); - try { - const res = await this.blockBlobContext.getBlockList(listType, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
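// The stageBlock / commitBlockList / getBlockList rewrites above are behaviour-preserving,
// so the staged-block workflow from their doc comments still applies: stage each block
// under a base64 block ID of uniform length, then commit the IDs in order. A minimal
// sketch, assuming the blockBlobClient from the earlier sketch and an enclosing async
// function:
const blockIds = [
  Buffer.from("block-000001").toString("base64"),
  Buffer.from("block-000002").toString("base64")
];
await blockBlobClient.stageBlock(blockIds[0], Buffer.from("hello "), 6);
await blockBlobClient.stageBlock(blockIds[1], Buffer.from("world"), 5);
await blockBlobClient.commitBlockList(blockIds); // blob content is now "hello world"
const { committedBlocks } = await blockBlobClient.getBlockList("committed");
console.log(committedBlocks.map((b) => b.name)); // the two IDs staged above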
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - if (!res.committedBlocks) { - res.committedBlocks = []; - } - if (!res.uncommittedBlocks) { - res.uncommittedBlocks = []; - } - return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, res, e_29; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlockBlobClient-getBlockList", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.blockBlobContext.getBlockList(listType, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + res = _c.sent(); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return [2 /*return*/, res]; + case 3: + e_29 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_29.message + }); + throw e_29; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; // High level functions /** * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. @@ -82771,36 +85387,42 @@ class BlockBlobClient extends BlobClient { * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView * @param options - */ - async uploadData(data, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); - try { - if (true) { - let buffer; - if (data instanceof Buffer) { - buffer = data; + BlockBlobClient.prototype.uploadData = function (data, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, buffer_1, browserBlob_1; + return tslib.__generator(this, function (_b) { + _a = createSpan("BlockBlobClient-uploadData", options), span = _a.span, updatedOptions = _a.updatedOptions; + try { + if (true) { + if (data instanceof Buffer) { + buffer_1 = data; + } + else if (data instanceof ArrayBuffer) { + buffer_1 = Buffer.from(data); + } + else { + data = data; + buffer_1 = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return [2 /*return*/, this.uploadSeekableInternal(function (offset, size) { return buffer_1.slice(offset, offset + size); }, buffer_1.byteLength, updatedOptions)]; + } + else {} } - else if (data instanceof ArrayBuffer) { - buffer = Buffer.from(data); + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; } - else { - data = data; - buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + finally { + span.end(); } - return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); - } - else {} - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + return [2 /*return*/]; }); - throw e; - } - finally { - span.end(); - } - } + }); + 
}; /** * ONLY AVAILABLE IN BROWSERS. * @@ -82816,23 +85438,35 @@ class BlockBlobClient extends BlobClient { * @param options - Options to upload browser data. * @returns Response data for the Blob Upload operation. */ - async uploadBrowserData(browserData, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); - try { - const browserBlob = new Blob([browserData]); - return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlockBlobClient.prototype.uploadBrowserData = function (browserData, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, browserBlob_2, e_30; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlockBlobClient-uploadBrowserData", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + browserBlob_2 = new Blob([browserData]); + return [4 /*yield*/, this.uploadSeekableInternal(function (offset, size) { return browserBlob_2.slice(offset, offset + size); }, browserBlob_2.size, updatedOptions)]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_30 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_30.message + }); + throw e_30; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * * Uploads data to block blob. Requires a bodyFactory as the data source, @@ -82848,88 +85482,115 @@ class BlockBlobClient extends BlobClient { * @param options - Options to Upload to Block Blob operation. * @returns Response data for the Blob Upload operation. 
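// uploadData (rewritten just above) normalizes its input before delegating to
// uploadSeekableInternal: a Buffer is used as-is, an ArrayBuffer is wrapped with
// Buffer.from, and any other ArrayBufferView is wrapped via its .buffer/.byteOffset.
// The bodyFactory it passes down simply slices that Buffer. A minimal sketch of the
// accepted input types, assuming the same blockBlobClient and an async context:
const payload = Buffer.from(JSON.stringify({ hello: "world" }));
await blockBlobClient.uploadData(payload);                   // Buffer
await blockBlobClient.uploadData(new Uint8Array([1, 2, 3])); // ArrayBufferView
await blockBlobClient.uploadData(new ArrayBuffer(1024));     // ArrayBuffer, zero-filled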
*/ - async uploadSeekableInternal(bodyFactory, size, options = {}) { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { - throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); - } - if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { - options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; - } - if (options.maxSingleShotSize < 0 || - options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { - throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); - } - if (options.blockSize === 0) { - if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`${size} is too larger to upload to a block blob.`); - } - if (size > options.maxSingleShotSize) { - options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); - if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } - } - } - if (!options.blobHTTPHeaders) { - options.blobHTTPHeaders = {}; - } - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); - try { - if (size <= options.maxSingleShotSize) { - return await this.upload(bodyFactory(0, size), size, updatedOptions); - } - const numBlocks = Math.floor((size - 1) / options.blockSize) + 1; - if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + - `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); - } - const blockList = []; - const blockIDPrefix = coreHttp.generateUuid(); - let transferProgress = 0; - const batch = new Batch(options.concurrency); - for (let i = 0; i < numBlocks; i++) { - batch.addOperation(async () => { - const blockID = generateBlockID(blockIDPrefix, i); - const start = options.blockSize * i; - const end = i === numBlocks - 1 ? 
size : start + options.blockSize; - const contentLength = end - start; - blockList.push(blockID); - await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { - abortSignal: options.abortSignal, - conditions: options.conditions, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - }); - // Update progress after block is successfully uploaded to server, in case of block trying - // TODO: Hook with convenience layer progress event in finer level - transferProgress += contentLength; - if (options.onProgress) { - options.onProgress({ - loadedBytes: transferProgress + BlockBlobClient.prototype.uploadSeekableInternal = function (bodyFactory, size, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, numBlocks_1, blockList_1, blockIDPrefix_1, transferProgress_2, batch, _loop_2, i, e_31; + var _this = this; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError("blockSize option must be >= 0 and <= " + BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES); + } + if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { + options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + } + if (options.maxSingleShotSize < 0 || + options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError("maxSingleShotSize option must be >= 0 and <= " + BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES); + } + if (options.blockSize === 0) { + if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(size + " is too larger to upload to a block blob."); + } + if (size > options.maxSingleShotSize) { + options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + } + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + _a = createSpan("BlockBlobClient-uploadSeekableInternal", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 5, 6, 7]); + if (!(size <= options.maxSingleShotSize)) return [3 /*break*/, 3]; + return [4 /*yield*/, this.upload(bodyFactory(0, size), size, updatedOptions)]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + numBlocks_1 = Math.floor((size - 1) / options.blockSize) + 1; + if (numBlocks_1 > BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError("The buffer's size is too big or the BlockSize is too small;" + + ("the number of blocks must be <= " + BLOCK_BLOB_MAX_BLOCKS)); + } + blockList_1 = []; + blockIDPrefix_1 = coreHttp.generateUuid(); + transferProgress_2 = 0; + batch = new Batch(options.concurrency); + _loop_2 = function (i) { + batch.addOperation(function () { return tslib.__awaiter(_this, void 0, void 0, function () { + var blockID, start, end, contentLength; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + blockID = generateBlockID(blockIDPrefix_1, i); + start = options.blockSize * i; + end = i === numBlocks_1 - 1 ? 
size : start + options.blockSize; + contentLength = end - start; + blockList_1.push(blockID); + return [4 /*yield*/, this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + })]; + case 1: + _a.sent(); + // Update progress after block is successfully uploaded to server, in case of block trying + // TODO: Hook with convenience layer progress event in finer level + transferProgress_2 += contentLength; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress_2 + }); + } + return [2 /*return*/]; + } + }); + }); }); + }; + for (i = 0; i < numBlocks_1; i++) { + _loop_2(i); + } + return [4 /*yield*/, batch.do()]; + case 4: + _b.sent(); + return [2 /*return*/, this.commitBlockList(blockList_1, updatedOptions)]; + case 5: + e_31 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_31.message }); - } - }); - } - await batch.do(); - return this.commitBlockList(blockList, updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + throw e_31; + case 6: + span.end(); + return [7 /*endfinally*/]; + case 7: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -82943,29 +85604,45 @@ class BlockBlobClient extends BlobClient { * @param options - Options to Upload to Block Blob operation. * @returns Response data for the Blob Upload operation. */ - async uploadFile(filePath, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); - try { - const size = (await fsStat(filePath)).size; - return await this.uploadSeekableInternal((offset, count) => { - return () => fsCreateReadStream(filePath, { - autoClose: true, - end: count ? offset + count - 1 : Infinity, - start: offset - }); - }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlockBlobClient.prototype.uploadFile = function (filePath, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, size, e_32; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlockBlobClient-uploadFile", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 4, 5, 6]); + return [4 /*yield*/, fsStat(filePath)]; + case 2: + size = (_b.sent()).size; + return [4 /*yield*/, this.uploadSeekableInternal(function (offset, count) { + return function () { + return fsCreateReadStream(filePath, { + autoClose: true, + end: count ? 
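// The sizing logic in uploadSeekableInternal (both the removed async/await form and the
// re-compiled _loop_2 form above) is unchanged: payloads at or below maxSingleShotSize go
// through a single upload() call, larger ones are split into ceil(size / blockSize)
// blocks, written here as Math.floor((size - 1) / blockSize) + 1. A worked example with
// illustrative numbers (50,000 is the per-blob block limit behind BLOCK_BLOB_MAX_BLOCKS;
// the other figures are example values only):
const size = 1024 * 1024 * 1024;                          // 1 GiB payload
const blockSize = 8 * 1024 * 1024;                        // 8 MiB blocks chosen by the caller
const numBlocks = Math.floor((size - 1) / blockSize) + 1; // 128 blocks
if (numBlocks > 50000) {
  throw new RangeError("the number of blocks must be <= 50000");
}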
offset + count - 1 : Infinity, + start: offset + }); + }; + }, size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }))]; + case 3: return [2 /*return*/, _b.sent()]; + case 4: + e_32 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_32.message + }); + throw e_32; + case 5: + span.end(); + return [7 /*endfinally*/]; + case 6: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -82982,63 +85659,92 @@ class BlockBlobClient extends BlobClient { * @param options - Options to Upload Stream to Block Blob operation. * @returns Response data for the Blob Upload operation. */ - async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { - if (!options.blobHTTPHeaders) { - options.blobHTTPHeaders = {}; - } - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); - try { - let blockNum = 0; - const blockIDPrefix = coreHttp.generateUuid(); - let transferProgress = 0; - const blockList = []; - const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { - const blockID = generateBlockID(blockIDPrefix, blockNum); - blockList.push(blockID); - blockNum++; - await this.stageBlock(blockID, body, length, { - conditions: options.conditions, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - }); - // Update progress after block is successfully uploaded to server, in case of block trying - transferProgress += length; - if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); - } - }, - // concurrency should set a smaller value than maxConcurrency, which is helpful to - // reduce the possibility when a outgoing handler waits for stream data, in - // this situation, outgoing handlers are blocked. - // Outgoing queue shouldn't be empty. 
- Math.ceil((maxConcurrency / 4) * 3)); - await scheduler.do(); - return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlockBlobClient.prototype.uploadStream = function (stream, bufferSize, maxConcurrency, options) { + if (bufferSize === void 0) { bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES; } + if (maxConcurrency === void 0) { maxConcurrency = 5; } + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, blockNum_1, blockIDPrefix_2, transferProgress_3, blockList_2, scheduler, e_33; + var _this = this; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + _a = createSpan("BlockBlobClient-uploadStream", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 4, 5, 6]); + blockNum_1 = 0; + blockIDPrefix_2 = coreHttp.generateUuid(); + transferProgress_3 = 0; + blockList_2 = []; + scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, function (body, length) { return tslib.__awaiter(_this, void 0, void 0, function () { + var blockID; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + blockID = generateBlockID(blockIDPrefix_2, blockNum_1); + blockList_2.push(blockID); + blockNum_1++; + return [4 /*yield*/, this.stageBlock(blockID, body, length, { + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + })]; + case 1: + _a.sent(); + // Update progress after block is successfully uploaded to server, in case of block trying + transferProgress_3 += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress_3 }); + } + return [2 /*return*/]; + } + }); + }); }, + // concurrency should set a smaller value than maxConcurrency, which is helpful to + // reduce the possibility when a outgoing handler waits for stream data, in + // this situation, outgoing handlers are blocked. + // Outgoing queue shouldn't be empty. + Math.ceil((maxConcurrency / 4) * 3)); + return [4 /*yield*/, scheduler.do()]; + case 2: + _b.sent(); + return [4 /*yield*/, this.commitBlockList(blockList_2, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }))]; + case 3: return [2 /*return*/, _b.sent()]; + case 4: + e_33 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_33.message + }); + throw e_33; + case 5: + span.end(); + return [7 /*endfinally*/]; + case 6: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } -} + }); + }; + return BlockBlobClient; +}(BlobClient)); /** * PageBlobClient defines a set of operations applicable to page blobs. 
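// uploadStream keeps its signature and defaults (bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES,
// maxConcurrency = 5) through the rewrite above, and still caps the BufferScheduler's
// outgoing concurrency at Math.ceil((maxConcurrency / 4) * 3) so buffer slots remain
// available for incoming stream data. A Node-only sketch, assuming the same
// blockBlobClient, a local file path, and an enclosing async function:
const fs = require("fs");
await blockBlobClient.uploadStream(
  fs.createReadStream("./large-file.bin"), // any Readable stream works
  4 * 1024 * 1024,                         // bufferSize: 4 MiB per staged block
  8,                                       // maxConcurrency
  { onProgress: (ev) => console.log(`uploaded ${ev.loadedBytes} bytes`) }
);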
*/ -class PageBlobClient extends BlobClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { +var PageBlobClient = /** @class */ (function (_super) { + tslib.__extends(PageBlobClient, _super); + function PageBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + var _this = this; // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); - let pipeline; - let url; + var pipeline; + var url; options = options || {}; if (credentialOrPipelineOrContainerName instanceof Pipeline) { // (url: string, pipeline: Pipeline) @@ -83065,12 +85771,12 @@ class PageBlobClient extends BlobClient { blobNameOrOptions && typeof blobNameOrOptions === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + var containerName = credentialOrPipelineOrContainerName; + var blobName = blobNameOrOptions; + var extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); @@ -83090,8 +85796,9 @@ class PageBlobClient extends BlobClient { else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url, pipeline); - this.pageBlobContext = new PageBlob(this.storageClientContext); + _this = _super.call(this, url, pipeline) || this; + _this.pageBlobContext = new PageBlob(_this.storageClientContext); + return _this; } /** * Creates a new PageBlobClient object identical to the source but with the @@ -83101,9 +85808,9 @@ class PageBlobClient extends BlobClient { * @param snapshot - The snapshot timestamp. * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. */ - withSnapshot(snapshot) { + PageBlobClient.prototype.withSnapshot = function (snapshot) { return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); - } + }; /** * Creates a page blob of the specified length. Call uploadPages to upload data * data to a page blob. @@ -83113,25 +85820,37 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Create operation. * @returns Response data for the Page Blob Create operation. 
*/ - async create(size, options = {}) { + PageBlobClient.prototype.create = function (size, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-create", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_34; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-create", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.pageBlobContext.create(0, size, tslib.__assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_34 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_34.message + }); + throw e_34; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Creates a page blob of the specified length. Call uploadPages to upload data * data to a page blob. If the blob with the same name already exists, the content @@ -83141,33 +85860,46 @@ class PageBlobClient extends BlobClient { * @param size - size of the page blob. * @param options - */ - async createIfNotExists(size, options = {}) { + PageBlobClient.prototype.createIfNotExists = function (size, options) { var _a, _b; - const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); - try { - const conditions = { ifNoneMatch: ETagAny }; - const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _c, span, updatedOptions, conditions, res, e_35; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _c = createSpan("PageBlobClient-createIfNotExists", options), span = _c.span, updatedOptions = _c.updatedOptions; + _d.label = 1; + case 1: + _d.trys.push([1, 3, 4, 5]); + conditions = { ifNoneMatch: ETagAny }; + return [4 /*yield*/, this.create(size, tslib.__assign(tslib.__assign({}, options), { conditions: conditions, tracingOptions: updatedOptions.tracingOptions }))]; + case 2: + res = _d.sent(); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable + })]; + case 3: + e_35 = _d.sent(); + if (((_a = e_35.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist." + }); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_35.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_35.response })]; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_35.message + }); + throw e_35; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. * @see https://docs.microsoft.com/rest/api/storageservices/put-page @@ -83178,27 +85910,39 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Upload Pages operation. * @returns Response data for the Page Blob Upload Pages operation. */ - async uploadPages(body, offset, count, options = {}) { + PageBlobClient.prototype.uploadPages = function (body, offset, count, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress - }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_36; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-uploadPages", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.pageBlobContext.uploadPages(count, body, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress + }, range: rangeToString({ offset: offset, count: count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_36 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_36.message + }); + throw e_36; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * The Upload Pages operation writes a range of pages to a page blob where the * contents are read from a URL. @@ -83210,31 +85954,43 @@ class PageBlobClient extends BlobClient { * @param count - Number of bytes to be uploaded from source page blob * @param options - */ - async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + PageBlobClient.prototype.uploadPagesFromURL = function (sourceURL, sourceOffset, destOffset, count, options) { var _a; - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
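// PageBlobClient.create and uploadPages (rewritten above) keep the 512-byte page
// contract from their doc comments: the blob size and every offset/count must be a
// multiple of 512. A minimal sketch, assuming a pageBlobClient obtained the same way
// as the block blob client earlier and an enclosing async function:
const PAGE = 512;
await pageBlobClient.create(16 * PAGE);             // 8 KiB page blob, initially zero-filled
const onePage = Buffer.alloc(PAGE, "a");            // exactly one page of payload
await pageBlobClient.uploadPages(onePage, 0, PAGE); // write page 0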
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_37; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + _b = createSpan("PageBlobClient-uploadPagesFromURL", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return [4 /*yield*/, this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count: count }), 0, rangeToString({ offset: destOffset, count: count }), tslib.__assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_37 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_37.message + }); + throw e_37; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Frees the specified pages from the page blob. * @see https://docs.microsoft.com/rest/api/storageservices/put-page @@ -83244,24 +86000,37 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Clear Pages operation. * @returns Response data for the Page Blob Clear Pages operation. */ - async clearPages(offset = 0, count, options = {}) { + PageBlobClient.prototype.clearPages = function (offset, count, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); - try { - return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (offset === void 0) { offset = 0; } + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_38; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-clearPages", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.pageBlobContext.clearPages(0, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset: offset, count: count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_38 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_38.message + }); + throw e_38; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns the list of valid page ranges for a page blob or snapshot of a page blob. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges @@ -83271,26 +86040,39 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Get Ranges operation. * @returns Response data for the Page Blob Get Ranges operation. */ - async getPageRanges(offset = 0, count, options = {}) { + PageBlobClient.prototype.getPageRanges = function (offset, count, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); - try { - return await this.pageBlobContext - .getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (offset === void 0) { offset = 0; } + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_39; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-getPageRanges", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.pageBlobContext + .getPageRanges(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset: offset, count: count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel)]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_39 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_39.message + }); + throw e_39; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Gets the collection of page ranges that differ between a specified snapshot and this page blob. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges @@ -83301,26 +86083,38 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Get Page Ranges Diff operation. * @returns Response data for the Page Blob Get Page Range Diff operation. */ - async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + PageBlobClient.prototype.getPageRangesDiff = function (offset, count, prevSnapshot, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); - try { - return await this.pageBlobContext - .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
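// getPageRanges and getPageRangesDiff (above and below) still pipe the service response
// through rangeResponseFromModel, so callers see pageRange / clearRange arrays of
// { offset, count } pairs rather than the raw wire model. A sketch, assuming the
// pageBlobClient from the previous sketch and a previously captured snapshot name
// (earlierSnapshot is hypothetical):
const ranges = await pageBlobClient.getPageRanges(0, 16 * 512);
for (const range of ranges.pageRange || []) {
  console.log(`valid pages at offset=${range.offset}, count=${range.count}`);
}
const diff = await pageBlobClient.getPageRangesDiff(0, 16 * 512, earlierSnapshot);
console.log(diff.clearRange || []); // pages cleared since that snapshot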
void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_40; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-getPageRangesDiff", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.pageBlobContext + .getPageRangesDiff(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset: offset, count: count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel)]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_40 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_40.message + }); + throw e_40; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges @@ -83331,26 +86125,38 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Get Page Ranges Diff operation. * @returns Response data for the Page Blob Get Page Range Diff operation. */ - async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { + PageBlobClient.prototype.getPageRangesDiffForManagedDisks = function (offset, count, prevSnapshotUrl, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); - try { - return await this.pageBlobContext - .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), prevSnapshotUrl, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_41; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.pageBlobContext + .getPageRangesDiff(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl: prevSnapshotUrl, range: rangeToString({ offset: offset, count: count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel)]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_41 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_41.message + }); + throw e_41; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Resizes the page blob to the specified size (which must be a multiple of 512). * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties @@ -83359,24 +86165,36 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Resize operation. * @returns Response data for the Page Blob Resize operation. */ - async resize(size, options = {}) { + PageBlobClient.prototype.resize = function (size, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); - try { - return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_42; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-resize", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.pageBlobContext.resize(size, tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_42 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_42.message + }); + throw e_42; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets a page blob's sequence number. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties @@ -83386,24 +86204,36 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Update Sequence Number operation. * @returns Response data for the Page Blob Update Sequence Number operation. */ - async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { + PageBlobClient.prototype.updateSequenceNumber = function (sequenceNumberAction, sequenceNumber, options) { var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); - try { - return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_43; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _b = createSpan("PageBlobClient-updateSequenceNumber", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, tslib.__assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_43 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_43.message + }); + throw e_43; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. * The snapshot is copied such that only the differential changes between the previously @@ -83417,46 +86247,69 @@ class PageBlobClient extends BlobClient { * @param options - Options to the Page Blob Copy Incremental operation. * @returns Response data for the Page Blob Copy Incremental operation. 
*/ - async startCopyIncremental(copySource, options = {}) { + PageBlobClient.prototype.startCopyIncremental = function (copySource, options) { var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); - try { - return await this.pageBlobContext.copyIncremental(copySource, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, e_44; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("PageBlobClient-startCopyIncremental", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.pageBlobContext.copyIncremental(copySource, tslib.__assign({ abortSignal: options.abortSignal, modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_44 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_44.message + }); + throw e_44; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } -} + }); + }; + return PageBlobClient; +}(BlobClient)); // Copyright (c) Microsoft Corporation. -async function getBodyAsText(batchResponse) { - let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); - const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); - // Slice the buffer to trim the empty ending. - buffer = buffer.slice(0, responseLength); - return buffer.toString(); +function getBodyAsText(batchResponse) { + return tslib.__awaiter(this, void 0, void 0, function () { + var buffer, responseLength; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); + return [4 /*yield*/, streamToBuffer2(batchResponse.readableStreamBody, buffer)]; + case 1: + responseLength = _a.sent(); + // Slice the buffer to trim the empty ending. + buffer = buffer.slice(0, responseLength); + return [2 /*return*/, buffer.toString()]; + } + }); + }); } function utf8ByteLength(str) { return Buffer.byteLength(str); } // Copyright (c) Microsoft Corporation. -const HTTP_HEADER_DELIMITER = ": "; -const SPACE_DELIMITER = " "; -const NOT_FOUND = -1; +var HTTP_HEADER_DELIMITER = ": "; +var SPACE_DELIMITER = " "; +var NOT_FOUND = -1; /** * Util class for parsing batch response. */ -class BatchResponseParser { - constructor(batchResponse, subRequests) { +var BatchResponseParser = /** @class */ (function () { + function BatchResponseParser(batchResponse, subRequests) { if (!batchResponse || !batchResponse.contentType) { // In special case(reported), server may return invalid content-type which could not be parsed. 
throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); @@ -83468,119 +86321,129 @@ class BatchResponseParser { this.batchResponse = batchResponse; this.subRequests = subRequests; this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; - this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; - this.batchResponseEnding = `--${this.responseBatchBoundary}--`; + this.perResponsePrefix = "--" + this.responseBatchBoundary + HTTP_LINE_ENDING; + this.batchResponseEnding = "--" + this.responseBatchBoundary + "--"; } // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response - async parseBatchResponse() { - // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse - // sub request's response. - if (this.batchResponse._response.status != HTTPURLConnection.HTTP_ACCEPTED) { - throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); - } - const responseBodyAsText = await getBodyAsText(this.batchResponse); - const subResponses = responseBodyAsText - .split(this.batchResponseEnding)[0] // string after ending is useless - .split(this.perResponsePrefix) - .slice(1); // string before first response boundary is useless - const subResponseCount = subResponses.length; - // Defensive coding in case of potential error parsing. - // Note: subResponseCount == 1 is special case where sub request is invalid. - // We try to prevent such cases through early validation, e.g. validate sub request count >= 1. - // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. - if (subResponseCount != this.subRequests.size && subResponseCount != 1) { - throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); - } - const deserializedSubResponses = new Array(subResponseCount); - let subResponsesSucceededCount = 0; - let subResponsesFailedCount = 0; - // Parse sub subResponses. - for (let index = 0; index < subResponseCount; index++) { - const subResponse = subResponses[index]; - const deserializedSubResponse = {}; - deserializedSubResponse.headers = new coreHttp.HttpHeaders(); - const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); - let subRespHeaderStartFound = false; - let subRespHeaderEndFound = false; - let subRespFailed = false; - let contentId = NOT_FOUND; - for (const responseLine of responseLines) { - if (!subRespHeaderStartFound) { - // Convention line to indicate content ID - if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { - contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); - } - // Http version line with status code indicates the start of sub request's response. - // Example: HTTP/1.1 202 Accepted - if (responseLine.startsWith(HTTP_VERSION_1_1)) { - subRespHeaderStartFound = true; - const tokens = responseLine.split(SPACE_DELIMITER); - deserializedSubResponse.status = parseInt(tokens[1]); - deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); - } - continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * - } - if (responseLine.trim() === "") { - // Sub response's header start line already found, and the first empty line indicates header end line found. 
- if (!subRespHeaderEndFound) { - subRespHeaderEndFound = true; - } - continue; // Skip empty line - } - // Note: when code reach here, it indicates subRespHeaderStartFound == true - if (!subRespHeaderEndFound) { - if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { - // Defensive coding to prevent from missing valuable lines. - throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); - } - // Parse headers of sub response. - const tokens = responseLine.split(HTTP_HEADER_DELIMITER); - deserializedSubResponse.headers.set(tokens[0], tokens[1]); - if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { - deserializedSubResponse.errorCode = tokens[1]; - subRespFailed = true; - } + BatchResponseParser.prototype.parseBatchResponse = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + var responseBodyAsText, subResponses, subResponseCount, deserializedSubResponses, subResponsesSucceededCount, subResponsesFailedCount, index, subResponse, deserializedSubResponse, responseLines, subRespHeaderStartFound, subRespHeaderEndFound, subRespFailed, contentId, _i, responseLines_1, responseLine, tokens, tokens; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse + // sub request's response. + if (this.batchResponse._response.status != HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error("Invalid state: batch request failed with status: '" + this.batchResponse._response.status + "'."); + } + return [4 /*yield*/, getBodyAsText(this.batchResponse)]; + case 1: + responseBodyAsText = _a.sent(); + subResponses = responseBodyAsText + .split(this.batchResponseEnding)[0] // string after ending is useless + .split(this.perResponsePrefix) + .slice(1); + subResponseCount = subResponses.length; + // Defensive coding in case of potential error parsing. + // Note: subResponseCount == 1 is special case where sub request is invalid. + // We try to prevent such cases through early validation, e.g. validate sub request count >= 1. + // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. + if (subResponseCount != this.subRequests.size && subResponseCount != 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + } + deserializedSubResponses = new Array(subResponseCount); + subResponsesSucceededCount = 0; + subResponsesFailedCount = 0; + // Parse sub subResponses. + for (index = 0; index < subResponseCount; index++) { + subResponse = subResponses[index]; + deserializedSubResponse = {}; + deserializedSubResponse.headers = new coreHttp.HttpHeaders(); + responseLines = subResponse.split("" + HTTP_LINE_ENDING); + subRespHeaderStartFound = false; + subRespHeaderEndFound = false; + subRespFailed = false; + contentId = NOT_FOUND; + for (_i = 0, responseLines_1 = responseLines; _i < responseLines_1.length; _i++) { + responseLine = responseLines_1[_i]; + if (!subRespHeaderStartFound) { + // Convention line to indicate content ID + if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + // Http version line with status code indicates the start of sub request's response. 
+ // Example: HTTP/1.1 202 Accepted + if (responseLine.startsWith(HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * + } + if (responseLine.trim() === "") { + // Sub response's header start line already found, and the first empty line indicates header end line found. + if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; // Skip empty line + } + // Note: when code reach here, it indicates subRespHeaderStartFound == true + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + // Defensive coding to prevent from missing valuable lines. + throw new Error("Invalid state: find non-empty line '" + responseLine + "' without HTTP header delimiter '" + HTTP_HEADER_DELIMITER + "'."); + } + tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; + } + } + else { + // Assemble body of sub response. + if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; + } + } // Inner for end + // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. + // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it + // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that + // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose. + if (contentId != NOT_FOUND && + Number.isInteger(contentId) && + contentId >= 0 && + contentId < this.subRequests.size && + deserializedSubResponses[contentId] === undefined) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; + } + else { + logger.error("subResponses[" + index + "] is dropped as the Content-ID is not found or invalid, Content-ID: " + contentId); + } + if (subRespFailed) { + subResponsesFailedCount++; + } + else { + subResponsesSucceededCount++; + } + } + return [2 /*return*/, { + subResponses: deserializedSubResponses, + subResponsesSucceededCount: subResponsesSucceededCount, + subResponsesFailedCount: subResponsesFailedCount + }]; } - else { - // Assemble body of sub response. - if (!deserializedSubResponse.bodyAsText) { - deserializedSubResponse.bodyAsText = ""; - } - deserializedSubResponse.bodyAsText += responseLine; - } - } // Inner for end - // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. - // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it - // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that - // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose. 
- if (contentId != NOT_FOUND && - Number.isInteger(contentId) && - contentId >= 0 && - contentId < this.subRequests.size && - deserializedSubResponses[contentId] === undefined) { - deserializedSubResponse._request = this.subRequests.get(contentId); - deserializedSubResponses[contentId] = deserializedSubResponse; - } - else { - logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); - } - if (subRespFailed) { - subResponsesFailedCount++; - } - else { - subResponsesSucceededCount++; - } - } - return { - subResponses: deserializedSubResponses, - subResponsesSucceededCount: subResponsesSucceededCount, - subResponsesFailedCount: subResponsesFailedCount - }; - } -} + }); + }); + }; + return BatchResponseParser; +}()); // Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. var MutexLockStatus; (function (MutexLockStatus) { MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED"; @@ -83589,68 +86452,82 @@ var MutexLockStatus; /** * An async mutex lock. */ -class Mutex { +var Mutex = /** @class */ (function () { + function Mutex() { + } /** * Lock for a specific key. If the lock has been acquired by another customer, then * will wait until getting the lock. * * @param key - lock key */ - static async lock(key) { - return new Promise((resolve) => { - if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) { - this.keys[key] = MutexLockStatus.LOCKED; - resolve(); - } - else { - this.onUnlockEvent(key, () => { - this.keys[key] = MutexLockStatus.LOCKED; - resolve(); - }); - } + Mutex.lock = function (key) { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, new Promise(function (resolve) { + if (_this.keys[key] === undefined || _this.keys[key] === MutexLockStatus.UNLOCKED) { + _this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + } + else { + _this.onUnlockEvent(key, function () { + _this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + }); + } + })]; + }); }); - } + }; /** * Unlock a key. * * @param key - */ - static async unlock(key) { - return new Promise((resolve) => { - if (this.keys[key] === MutexLockStatus.LOCKED) { - this.emitUnlockEvent(key); - } - delete this.keys[key]; - resolve(); + Mutex.unlock = function (key) { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, new Promise(function (resolve) { + if (_this.keys[key] === MutexLockStatus.LOCKED) { + _this.emitUnlockEvent(key); + } + delete _this.keys[key]; + resolve(); + })]; + }); }); - } - static onUnlockEvent(key, handler) { + }; + Mutex.onUnlockEvent = function (key, handler) { if (this.listeners[key] === undefined) { this.listeners[key] = [handler]; } else { this.listeners[key].push(handler); } - } - static emitUnlockEvent(key) { + }; + Mutex.emitUnlockEvent = function (key) { + var _this = this; if (this.listeners[key] !== undefined && this.listeners[key].length > 0) { - const handler = this.listeners[key].shift(); - setImmediate(() => { - handler.call(this); + var handler_1 = this.listeners[key].shift(); + setImmediate(function () { + handler_1.call(_this); }); } - } -} -Mutex.keys = {}; -Mutex.listeners = {}; + }; + Mutex.keys = {}; + Mutex.listeners = {}; + return Mutex; +}()); // Copyright (c) Microsoft Corporation. /** * A BlobBatch represents an aggregated set of operations on blobs. 
* Currently, only `delete` and `setAccessTier` are supported. */ -class BlobBatch { - constructor() { +var BlobBatch = /** @class */ (function () { + function BlobBatch() { this.batch = "batch"; this.batchRequest = new InnerBatchRequest(); } @@ -83659,151 +86536,203 @@ class BlobBatch { * The value must be multipart/mixed with a batch boundary. * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 */ - getMultiPartContentType() { + BlobBatch.prototype.getMultiPartContentType = function () { return this.batchRequest.getMultipartContentType(); - } + }; /** * Get assembled HTTP request body for sub requests. */ - getHttpRequestBody() { + BlobBatch.prototype.getHttpRequestBody = function () { return this.batchRequest.getHttpRequestBody(); - } + }; /** * Get sub requests that are added into the batch request. */ - getSubRequests() { + BlobBatch.prototype.getSubRequests = function () { return this.batchRequest.getSubRequests(); - } - async addSubRequestInternal(subRequest, assembleSubRequestFunc) { - await Mutex.lock(this.batch); - try { - this.batchRequest.preAddSubRequest(subRequest); - await assembleSubRequestFunc(); - this.batchRequest.postAddSubRequest(subRequest); - } - finally { - await Mutex.unlock(this.batch); - } - } - setBatchType(batchType) { + }; + BlobBatch.prototype.addSubRequestInternal = function (subRequest, assembleSubRequestFunc) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, Mutex.lock(this.batch)]; + case 1: + _a.sent(); + _a.label = 2; + case 2: + _a.trys.push([2, , 4, 6]); + this.batchRequest.preAddSubRequest(subRequest); + return [4 /*yield*/, assembleSubRequestFunc()]; + case 3: + _a.sent(); + this.batchRequest.postAddSubRequest(subRequest); + return [3 /*break*/, 6]; + case 4: return [4 /*yield*/, Mutex.unlock(this.batch)]; + case 5: + _a.sent(); + return [7 /*endfinally*/]; + case 6: return [2 /*return*/]; + } + }); + }); + }; + BlobBatch.prototype.setBatchType = function (batchType) { if (!this.batchType) { this.batchType = batchType; } if (this.batchType !== batchType) { - throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); - } - } - async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { - let url; - let credential; - if (typeof urlOrBlobClient === "string" && - ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || - credentialOrOptions instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrOptions))) { - // First overload - url = urlOrBlobClient; - credential = credentialOrOptions; - } - else if (urlOrBlobClient instanceof BlobClient) { - // Second overload - url = urlOrBlobClient.url; - credential = urlOrBlobClient.credential; - options = credentialOrOptions; - } - else { - throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); - } - if (!options) { - options = {}; - } - const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); - try { - this.setBatchType("delete"); - await this.addSubRequestInternal({ - url: url, - credential: credential - }, async () => { - await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); - }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } - finally { - span.end(); - } - } - async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { - let url; - let credential; - let tier; - if (typeof urlOrBlobClient === "string" && - ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || - credentialOrTier instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrTier))) { - // First overload - url = urlOrBlobClient; - credential = credentialOrTier; - tier = tierOrOptions; - } - else if (urlOrBlobClient instanceof BlobClient) { - // Second overload - url = urlOrBlobClient.url; - credential = urlOrBlobClient.credential; - tier = credentialOrTier; - options = tierOrOptions; - } - else { - throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); - } - if (!options) { - options = {}; + throw new RangeError("BlobBatch only supports one operation type per batch and it already is being used for " + this.batchType + " operations."); } - const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); - try { - this.setBatchType("setAccessTier"); - await this.addSubRequestInternal({ - url: url, - credential: credential - }, async () => { - await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); + }; + BlobBatch.prototype.deleteBlob = function (urlOrBlobClient, credentialOrOptions, options) { + return tslib.__awaiter(this, void 0, void 0, function () { + var url, credential, _a, span, updatedOptions, e_1; + var _this = this; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (typeof urlOrBlobClient === "string" && + ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || + credentialOrOptions instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrOptions))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } + else { + throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + _a = createSpan("BatchDeleteRequest-addSubRequest", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + this.setBatchType("delete"); + return [4 /*yield*/, this.addSubRequestInternal({ + url: url, + credential: credential + }, function () { return tslib.__awaiter(_this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions)]; + case 1: + _a.sent(); + return [2 /*return*/]; + } + }); + }); })]; + case 2: + _b.sent(); + return [3 /*break*/, 5]; + case 3: + e_1 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_1.message + }); + throw e_1; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + }); + }; + BlobBatch.prototype.setBlobAccessTier = function (urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + return tslib.__awaiter(this, void 0, void 0, function () { + var url, credential, tier, _a, span, updatedOptions, e_2; + var _this = this; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (typeof urlOrBlobClient === "string" && + ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || + credentialOrTier instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrTier))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrTier; + tier = tierOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier = credentialOrTier; + options = tierOrOptions; + } + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + _a = createSpan("BatchSetTierRequest-addSubRequest", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + this.setBatchType("setAccessTier"); + return [4 /*yield*/, this.addSubRequestInternal({ + url: url, + credential: credential + }, function () { return tslib.__awaiter(_this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions)]; + case 1: + _a.sent(); + return [2 /*return*/]; + } + }); + }); })]; + case 2: + _b.sent(); + return [3 /*break*/, 5]; + case 3: + e_2 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_2.message + }); + throw e_2; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } -} + }); + }; + return BlobBatch; +}()); /** * Inner batch request class which is responsible for assembling and serializing sub requests. * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. 
*/ -class InnerBatchRequest { - constructor() { +var InnerBatchRequest = /** @class */ (function () { + function InnerBatchRequest() { this.operationCount = 0; this.body = ""; - const tempGuid = coreHttp.generateUuid(); + var tempGuid = coreHttp.generateUuid(); // batch_{batchid} - this.boundary = `batch_${tempGuid}`; + this.boundary = "batch_" + tempGuid; // --batch_{batchid} // Content-Type: application/http // Content-Transfer-Encoding: binary - this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + this.subRequestPrefix = "--" + this.boundary + HTTP_LINE_ENDING + HeaderConstants.CONTENT_TYPE + ": application/http" + HTTP_LINE_ENDING + HeaderConstants.CONTENT_TRANSFER_ENCODING + ": binary"; // multipart/mixed; boundary=batch_{batchid} - this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + this.multipartContentType = "multipart/mixed; boundary=" + this.boundary; // --batch_{batchid}-- - this.batchRequestEnding = `--${this.boundary}--`; + this.batchRequestEnding = "--" + this.boundary + "--"; this.subRequests = new Map(); } /** @@ -83813,10 +86742,10 @@ class InnerBatchRequest { * and intercept request from going to wire. * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. */ - createPipeline(credential) { - const isAnonymousCreds = credential instanceof AnonymousCredential; - const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] - const factories = new Array(policyFactoryLength); + InnerBatchRequest.prototype.createPipeline = function (credential) { + var isAnonymousCreds = credential instanceof AnonymousCredential; + var policyFactoryLength = 3 + (isAnonymousCreds ? 
0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] + var factories = new Array(policyFactoryLength); factories[0] = coreHttp.deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers if (!isAnonymousCreds) { @@ -83826,93 +86755,117 @@ class InnerBatchRequest { } factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire return new Pipeline(factories, {}); - } - appendSubRequestToBody(request) { + }; + InnerBatchRequest.prototype.appendSubRequestToBody = function (request) { // Start to assemble sub request this.body += [ this.subRequestPrefix, - `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + HeaderConstants.CONTENT_ID + ": " + this.operationCount, "", - `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}` // sub request start line with method + request.method.toString() + " " + getURLPathAndQuery(request.url) + " " + HTTP_VERSION_1_1 + HTTP_LINE_ENDING // sub request start line with method ].join(HTTP_LINE_ENDING); - for (const header of request.headers.headersArray()) { - this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + for (var _i = 0, _a = request.headers.headersArray(); _i < _a.length; _i++) { + var header = _a[_i]; + this.body += header.name + ": " + header.value + HTTP_LINE_ENDING; } this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line // No body to assemble for current batch request support // End to assemble sub request - } - preAddSubRequest(subRequest) { + }; + InnerBatchRequest.prototype.preAddSubRequest = function (subRequest) { if (this.operationCount >= BATCH_MAX_REQUEST) { - throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + throw new RangeError("Cannot exceed " + BATCH_MAX_REQUEST + " sub requests in a single batch"); } // Fast fail if url for sub request is invalid - const path = getURLPath(subRequest.url); + var path = getURLPath(subRequest.url); if (!path || path == "") { - throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + throw new RangeError("Invalid url for sub request: '" + subRequest.url + "'"); } - } - postAddSubRequest(subRequest) { + }; + InnerBatchRequest.prototype.postAddSubRequest = function (subRequest) { this.subRequests.set(this.operationCount, subRequest); this.operationCount++; - } + }; // Return the http request body with assembling the ending line to the sub request body. 
- getHttpRequestBody() { - return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; - } - getMultipartContentType() { + InnerBatchRequest.prototype.getHttpRequestBody = function () { + return "" + this.body + this.batchRequestEnding + HTTP_LINE_ENDING; + }; + InnerBatchRequest.prototype.getMultipartContentType = function () { return this.multipartContentType; - } - getSubRequests() { + }; + InnerBatchRequest.prototype.getSubRequests = function () { return this.subRequests; - } -} -class BatchRequestAssemblePolicy extends coreHttp.BaseRequestPolicy { - constructor(batchRequest, nextPolicy, options) { - super(nextPolicy, options); - this.dummyResponse = { + }; + return InnerBatchRequest; +}()); +var BatchRequestAssemblePolicy = /** @class */ (function (_super) { + tslib.__extends(BatchRequestAssemblePolicy, _super); + function BatchRequestAssemblePolicy(batchRequest, nextPolicy, options) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.dummyResponse = { request: new coreHttp.WebResource(), status: 200, headers: new coreHttp.HttpHeaders() }; - this.batchRequest = batchRequest; - } - async sendRequest(request) { - await this.batchRequest.appendSubRequestToBody(request); - return this.dummyResponse; // Intercept request from going to wire + _this.batchRequest = batchRequest; + return _this; } -} -class BatchRequestAssemblePolicyFactory { - constructor(batchRequest) { + BatchRequestAssemblePolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.batchRequest.appendSubRequestToBody(request)]; + case 1: + _a.sent(); + return [2 /*return*/, this.dummyResponse]; // Intercept request from going to wire + } + }); + }); + }; + return BatchRequestAssemblePolicy; +}(coreHttp.BaseRequestPolicy)); +var BatchRequestAssemblePolicyFactory = /** @class */ (function () { + function BatchRequestAssemblePolicyFactory(batchRequest) { this.batchRequest = batchRequest; } - create(nextPolicy, options) { + BatchRequestAssemblePolicyFactory.prototype.create = function (nextPolicy, options) { return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); + }; + return BatchRequestAssemblePolicyFactory; +}()); +var BatchHeaderFilterPolicy = /** @class */ (function (_super) { + tslib.__extends(BatchHeaderFilterPolicy, _super); + function BatchHeaderFilterPolicy(nextPolicy, options) { + return _super.call(this, nextPolicy, options) || this; + } + BatchHeaderFilterPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + var xMsHeaderName, _i, _a, header; + return tslib.__generator(this, function (_b) { + xMsHeaderName = ""; + for (_i = 0, _a = request.headers.headersArray(); _i < _a.length; _i++) { + header = _a[_i]; + if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = header.name; + } + } + if (xMsHeaderName !== "") { + request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. 
+ } + return [2 /*return*/, this._nextPolicy.sendRequest(request)]; + }); + }); + }; + return BatchHeaderFilterPolicy; +}(coreHttp.BaseRequestPolicy)); +var BatchHeaderFilterPolicyFactory = /** @class */ (function () { + function BatchHeaderFilterPolicyFactory() { } -} -class BatchHeaderFilterPolicy extends coreHttp.BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(request) { - let xMsHeaderName = ""; - for (const header of request.headers.headersArray()) { - if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { - xMsHeaderName = header.name; - } - } - if (xMsHeaderName !== "") { - request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. - } - return this._nextPolicy.sendRequest(request); - } -} -class BatchHeaderFilterPolicyFactory { - constructor() { } - create(nextPolicy, options) { + BatchHeaderFilterPolicyFactory.prototype.create = function (nextPolicy, options) { return new BatchHeaderFilterPolicy(nextPolicy, options); - } -} + }; + return BatchHeaderFilterPolicyFactory; +}()); // Copyright (c) Microsoft Corporation. /** @@ -83920,9 +86873,9 @@ class BatchHeaderFilterPolicyFactory { * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch */ -class BlobBatchClient { - constructor(url, credentialOrPipeline, options) { - let pipeline; +var BlobBatchClient = /** @class */ (function () { + function BlobBatchClient(url, credentialOrPipeline, options) { + var pipeline; if (credentialOrPipeline instanceof Pipeline) { pipeline = credentialOrPipeline; } @@ -83933,8 +86886,8 @@ class BlobBatchClient { else { pipeline = newPipeline(credentialOrPipeline, options); } - const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); - const path = getURLPath(url); + var storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); + var path = getURLPath(url); if (path && path !== "/") { // Container scoped. this.serviceOrContainerContext = new Container(storageClientContext); @@ -83947,33 +86900,67 @@ class BlobBatchClient { * Creates a {@link BlobBatch}. * A BlobBatch represents an aggregated set of operations on blobs. 
*/ - createBatch() { + BlobBatchClient.prototype.createBatch = function () { return new BlobBatch(); - } - async deleteBlobs(urlsOrBlobClients, credentialOrOptions, options) { - const batch = new BlobBatch(); - for (const urlOrBlobClient of urlsOrBlobClients) { - if (typeof urlOrBlobClient === "string") { - await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); - } - else { - await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); - } - } - return this.submitBatch(batch); - } - async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { - const batch = new BlobBatch(); - for (const urlOrBlobClient of urlsOrBlobClients) { - if (typeof urlOrBlobClient === "string") { - await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); - } - else { - await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); - } - } - return this.submitBatch(batch); - } + }; + BlobBatchClient.prototype.deleteBlobs = function (urlsOrBlobClients, credentialOrOptions, options) { + return tslib.__awaiter(this, void 0, void 0, function () { + var batch, _i, urlsOrBlobClients_1, urlOrBlobClient; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + batch = new BlobBatch(); + _i = 0, urlsOrBlobClients_1 = urlsOrBlobClients; + _a.label = 1; + case 1: + if (!(_i < urlsOrBlobClients_1.length)) return [3 /*break*/, 6]; + urlOrBlobClient = urlsOrBlobClients_1[_i]; + if (!(typeof urlOrBlobClient === "string")) return [3 /*break*/, 3]; + return [4 /*yield*/, batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options)]; + case 2: + _a.sent(); + return [3 /*break*/, 5]; + case 3: return [4 /*yield*/, batch.deleteBlob(urlOrBlobClient, credentialOrOptions)]; + case 4: + _a.sent(); + _a.label = 5; + case 5: + _i++; + return [3 /*break*/, 1]; + case 6: return [2 /*return*/, this.submitBatch(batch)]; + } + }); + }); + }; + BlobBatchClient.prototype.setBlobsAccessTier = function (urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { + return tslib.__awaiter(this, void 0, void 0, function () { + var batch, _i, urlsOrBlobClients_2, urlOrBlobClient; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + batch = new BlobBatch(); + _i = 0, urlsOrBlobClients_2 = urlsOrBlobClients; + _a.label = 1; + case 1: + if (!(_i < urlsOrBlobClients_2.length)) return [3 /*break*/, 6]; + urlOrBlobClient = urlsOrBlobClients_2[_i]; + if (!(typeof urlOrBlobClient === "string")) return [3 /*break*/, 3]; + return [4 /*yield*/, batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options)]; + case 2: + _a.sent(); + return [3 /*break*/, 5]; + case 3: return [4 /*yield*/, batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions)]; + case 4: + _a.sent(); + _a.label = 5; + case 5: + _i++; + return [3 /*break*/, 1]; + case 6: return [2 /*return*/, this.submitBatch(batch)]; + } + }); + }); + }; /** * Submit batch request which consists of multiple subrequests. * @@ -84009,51 +86996,67 @@ class BlobBatchClient { * @param batchRequest - A set of Delete or SetTier operations. 
* @param options - */ - async submitBatch(batchRequest, options = {}) { - if (!batchRequest || batchRequest.getSubRequests().size == 0) { - throw new RangeError("Batch request should contain one or more sub requests."); - } - const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); - try { - const batchRequestBody = batchRequest.getHttpRequestBody(); - // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. - const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); - // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). - const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); - const responseSummary = await batchResponseParser.parseBatchResponse(); - const res = { - _response: rawBatchResponse._response, - contentType: rawBatchResponse.contentType, - errorCode: rawBatchResponse.errorCode, - requestId: rawBatchResponse.requestId, - clientRequestId: rawBatchResponse.clientRequestId, - version: rawBatchResponse.version, - subResponses: responseSummary.subResponses, - subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, - subResponsesFailedCount: responseSummary.subResponsesFailedCount - }; - return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobBatchClient.prototype.submitBatch = function (batchRequest, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, batchRequestBody, rawBatchResponse, batchResponseParser, responseSummary, res, e_1; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!batchRequest || batchRequest.getSubRequests().size == 0) { + throw new RangeError("Batch request should contain one or more sub requests."); + } + _a = createSpan("BlobBatchClient-submitBatch", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 4, 5, 6]); + batchRequestBody = batchRequest.getHttpRequestBody(); + return [4 /*yield*/, this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, tslib.__assign(tslib.__assign({}, options), convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + rawBatchResponse = _b.sent(); + batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + return [4 /*yield*/, batchResponseParser.parseBatchResponse()]; + case 3: + responseSummary = _b.sent(); + res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount + }; + return [2 /*return*/, res]; + case 4: + e_1 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_1.message + }); + throw e_1; + case 5: + span.end(); + return [7 /*endfinally*/]; + case 6: 
return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } -} + }); + }; + return BlobBatchClient; +}()); /** * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. */ -class ContainerClient extends StorageClient { - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { - let pipeline; - let url; +var ContainerClient = /** @class */ (function (_super) { + tslib.__extends(ContainerClient, _super); + function ContainerClient(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { + var _this = this; + var pipeline; + var url; options = options || {}; if (credentialOrPipelineOrContainerName instanceof Pipeline) { // (url: string, pipeline: Pipeline) @@ -84077,11 +87080,11 @@ class ContainerClient extends StorageClient { else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) - const containerName = credentialOrPipelineOrContainerName; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + var containerName = credentialOrPipelineOrContainerName; + var extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); @@ -84101,16 +87104,21 @@ class ContainerClient extends StorageClient { else { throw new Error("Expecting non-empty strings for containerName parameter"); } - super(url, pipeline); - this._containerName = this.getContainerNameFromUrl(); - this.containerContext = new Container(this.storageClientContext); - } - /** - * The name of the container. - */ - get containerName() { - return this._containerName; + _this = _super.call(this, url, pipeline) || this; + _this._containerName = _this.getContainerNameFromUrl(); + _this.containerContext = new Container(_this.storageClientContext); + return _this; } + Object.defineProperty(ContainerClient.prototype, "containerName", { + /** + * The name of the container. + */ + get: function () { + return this._containerName; + }, + enumerable: false, + configurable: true + }); /** * Creates a new container under the specified account. If the container with * the same name already exists, the operation fails. 
@@ -84127,24 +87135,37 @@ class ContainerClient extends StorageClient { * console.log("Container was created successfully", createContainerResponse.requestId); * ``` */ - async create(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-create", options); - try { - // Spread operator in destructuring assignments, - // this will filter out unwanted properties from the response object into result object - return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.create = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_1; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("ContainerClient-create", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.containerContext.create(tslib.__assign(tslib.__assign({}, options), convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + // Spread operator in destructuring assignments, + // this will filter out unwanted properties from the response object into result object + return [2 /*return*/, _b.sent()]; + case 3: + e_1 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_1.message + }); + throw e_1; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Creates a new container under the specified account. If the container with * the same name already exists, it is not changed. @@ -84152,32 +87173,45 @@ class ContainerClient extends StorageClient { * * @param options - */ - async createIfNotExists(options = {}) { + ContainerClient.prototype.createIfNotExists = function (options) { var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); - try { - const res = await this.create(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a container only if it does not already exist." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _c, span, updatedOptions, res, e_2; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _c = createSpan("ContainerClient-createIfNotExists", options), span = _c.span, updatedOptions = _c.updatedOptions; + _d.label = 1; + case 1: + _d.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.create(updatedOptions)]; + case 2: + res = _d.sent(); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable + })]; + case 3: + e_2 = _d.sent(); + if (((_a = e_2.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a container only if it does not already exist." + }); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_2.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_2.response })]; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_2.message + }); + throw e_2; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns true if the Azure container resource represented by this client exists; false otherwise. * @@ -84187,50 +87221,63 @@ class ContainerClient extends StorageClient { * * @param options - */ - async exists(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-exists", options); - try { - await this.getProperties({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - }); - return true; - } - catch (e) { - if (e.statusCode === 404) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking container existence" - }); - return false; - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.exists = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_3; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("ContainerClient-exists", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + })]; + case 2: + _b.sent(); + return [2 /*return*/, true]; + case 3: + e_3 = _b.sent(); + if (e_3.statusCode === 404) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when checking container existence" + }); + return [2 /*return*/, false]; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_3.message + }); + throw e_3; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Creates a {@link BlobClient} * * @param blobName - A blob name * @returns A new BlobClient object for the given blob name. 
*/ - getBlobClient(blobName) { + ContainerClient.prototype.getBlobClient = function (blobName) { return new BlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); - } + }; /** * Creates an {@link AppendBlobClient} * * @param blobName - An append blob name */ - getAppendBlobClient(blobName) { + ContainerClient.prototype.getAppendBlobClient = function (blobName) { return new AppendBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); - } + }; /** * Creates a {@link BlockBlobClient} * @@ -84246,17 +87293,17 @@ class ContainerClient extends StorageClient { * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ - getBlockBlobClient(blobName) { + ContainerClient.prototype.getBlockBlobClient = function (blobName) { return new BlockBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); - } + }; /** * Creates a {@link PageBlobClient} * * @param blobName - A page blob name */ - getPageBlobClient(blobName) { + ContainerClient.prototype.getPageBlobClient = function (blobName) { return new PageBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); - } + }; /** * Returns all user-defined metadata and system properties for the specified * container. The data returned does not include the container's list of blobs. @@ -84269,25 +87316,37 @@ class ContainerClient extends StorageClient { * * @param options - Options to Container Get Properties operation. */ - async getProperties(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); - try { - return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.getProperties = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_4; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!options.conditions) { + options.conditions = {}; + } + _a = createSpan("ContainerClient-getProperties", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.containerContext.getProperties(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_4 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_4.message + }); + throw e_4; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Marks the specified container for deletion. The container and any blobs * contained within it are later deleted during garbage collection. @@ -84295,25 +87354,37 @@ class ContainerClient extends StorageClient { * * @param options - Options to Container Delete operation. 
*/ - async delete(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("ContainerClient-delete", options); - try { - return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.delete = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_5; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!options.conditions) { + options.conditions = {}; + } + _a = createSpan("ContainerClient-delete", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.containerContext.delete(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_5 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_5.message + }); + throw e_5; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Marks the specified container for deletion if it exists. The container and any blobs * contained within it are later deleted during garbage collection. @@ -84321,32 +87392,45 @@ class ContainerClient extends StorageClient { * * @param options - Options to Container Delete operation. */ - async deleteIfExists(options = {}) { + ContainerClient.prototype.deleteIfExists = function (options) { var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); - try { - const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a container only if it exists." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _c, span, updatedOptions, res, e_6; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _c = createSpan("ContainerClient-deleteIfExists", options), span = _c.span, updatedOptions = _c.updatedOptions; + _d.label = 1; + case 1: + _d.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.delete(updatedOptions)]; + case 2: + res = _d.sent(); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable + })]; + case 3: + e_6 = _d.sent(); + if (((_a = e_6.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === "ContainerNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a container only if it exists." + }); + return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_6.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_6.response })]; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_6.message + }); + throw e_6; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets one or more user-defined name-value pairs for the specified container. * @@ -84359,28 +87443,40 @@ class ContainerClient extends StorageClient { * If no value provided the existing metadata will be removed. * @param options - Options to Container Set Metadata operation. */ - async setMetadata(metadata, options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - if (options.conditions.ifUnmodifiedSince) { - throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); - } - const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); - try { - return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.setMetadata = function (metadata, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_7; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!options.conditions) { + options.conditions = {}; + } + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + _a = createSpan("ContainerClient-setMetadata", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.containerContext.setMetadata(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_7 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_7.message + }); + throw e_7; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Gets the permissions for the specified container. The permissions indicate * whether container data may be accessed publicly. @@ -84392,56 +87488,70 @@ class ContainerClient extends StorageClient { * * @param options - Options to Container Get Access Policy operation. 
*/ - async getAccessPolicy(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); - try { - const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - const res = { - _response: response._response, - blobPublicAccess: response.blobPublicAccess, - date: response.date, - etag: response.etag, - errorCode: response.errorCode, - lastModified: response.lastModified, - requestId: response.requestId, - clientRequestId: response.clientRequestId, - signedIdentifiers: [], - version: response.version - }; - for (const identifier of response) { - let accessPolicy = undefined; - if (identifier.accessPolicy) { - accessPolicy = { - permissions: identifier.accessPolicy.permissions - }; - if (identifier.accessPolicy.expiresOn) { - accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); - } - if (identifier.accessPolicy.startsOn) { - accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); - } + ContainerClient.prototype.getAccessPolicy = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, response, res, _i, response_1, identifier, accessPolicy, e_8; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + if (!options.conditions) { + options.conditions = {}; + } + _a = createSpan("ContainerClient-getAccessPolicy", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.containerContext.getAccessPolicy(tslib.__assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _b.sent(); + res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version + }; + for (_i = 0, response_1 = response; _i < response_1.length; _i++) { + identifier = response_1[_i]; + accessPolicy = undefined; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy: accessPolicy, + id: identifier.id + }); + } + return [2 /*return*/, res]; + case 3: + e_8 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_8.message + }); + throw e_8; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; } - res.signedIdentifiers.push({ - accessPolicy, - id: identifier.id - }); - } - return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets the permissions for the specified container. 
The permissions indicate * whether blobs in a container may be accessed publicly. @@ -84459,47 +87569,60 @@ class ContainerClient extends StorageClient { * @param containerAcl - Array of elements each having a unique Id and details of the access policy. * @param options - Options to Container Set Access Policy operation. */ - async setAccessPolicy(access, containerAcl, options = {}) { - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); - try { - const acl = []; - for (const identifier of containerAcl || []) { - acl.push({ - accessPolicy: { - expiresOn: identifier.accessPolicy.expiresOn - ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) - : "", - permissions: identifier.accessPolicy.permissions, - startsOn: identifier.accessPolicy.startsOn - ? truncatedISO8061Date(identifier.accessPolicy.startsOn) - : "" - }, - id: identifier.id - }); - } - return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.setAccessPolicy = function (access, containerAcl, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, acl, _i, _b, identifier, e_9; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + options.conditions = options.conditions || {}; + _a = createSpan("ContainerClient-setAccessPolicy", options), span = _a.span, updatedOptions = _a.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + acl = []; + for (_i = 0, _b = containerAcl || []; _i < _b.length; _i++) { + identifier = _b[_i]; + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn + ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) + : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn + ? truncatedISO8061Date(identifier.accessPolicy.startsOn) + : "" + }, + id: identifier.id + }); + } + return [4 /*yield*/, this.containerContext.setAccessPolicy(tslib.__assign({ abortSignal: options.abortSignal, access: access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _c.sent()]; + case 3: + e_9 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_9.message + }); + throw e_9; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Get a {@link BlobLeaseClient} that manages leases on the container. * * @param proposeLeaseId - Initial proposed lease Id. * @returns A new BlobLeaseClient object for managing leases on the container. */ - getBlobLeaseClient(proposeLeaseId) { + ContainerClient.prototype.getBlobLeaseClient = function (proposeLeaseId) { return new BlobLeaseClient(this, proposeLeaseId); - } + }; /** * Creates a new block blob, or updates the content of an existing block blob. 
* @@ -84522,27 +87645,40 @@ class ContainerClient extends StorageClient { * @param options - Options to configure the Block Blob Upload operation. * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. */ - async uploadBlockBlob(blobName, body, contentLength, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); - try { - const blockBlobClient = this.getBlockBlobClient(blobName); - const response = await blockBlobClient.upload(body, contentLength, updatedOptions); - return { - blockBlobClient, - response - }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.uploadBlockBlob = function (blobName, body, contentLength, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, blockBlobClient, response, e_10; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("ContainerClient-uploadBlockBlob", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + blockBlobClient = this.getBlockBlobClient(blobName); + return [4 /*yield*/, blockBlobClient.upload(body, contentLength, updatedOptions)]; + case 2: + response = _b.sent(); + return [2 /*return*/, { + blockBlobClient: blockBlobClient, + response: response + }]; + case 3: + e_10 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_10.message + }); + throw e_10; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Marks the specified blob or snapshot for deletion. The blob is later deleted * during garbage collection. Note that in order to delete a blob, you must delete @@ -84554,26 +87690,38 @@ class ContainerClient extends StorageClient { * @param options - Options to Blob Delete operation. * @returns Block blob deletion response data. 
*/ - async deleteBlob(blobName, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); - try { - let blobClient = this.getBlobClient(blobName); - if (options.versionId) { - blobClient = blobClient.withVersion(options.versionId); - } - return await blobClient.delete(updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.deleteBlob = function (blobName, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, blobClient, e_11; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("ContainerClient-deleteBlob", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + blobClient = this.getBlobClient(blobName); + if (options.versionId) { + blobClient = blobClient.withVersion(options.versionId); + } + return [4 /*yield*/, blobClient.delete(updatedOptions)]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_11 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_11.message + }); + throw e_11; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * listBlobFlatSegment returns a single segment of blobs starting from the * specified Marker. Use an empty Marker to start enumeration from the beginning. @@ -84584,27 +87732,40 @@ class ContainerClient extends StorageClient { * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to Container List Blob Flat Segment operation. 
*/ - async listBlobFlatSegment(marker, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); - try { - const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); - return blobItem; - }) }) }); - return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.listBlobFlatSegment = function (marker, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, response, wrappedResponse, e_12; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("ContainerClient-listBlobFlatSegment", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.containerContext.listBlobFlatSegment(tslib.__assign(tslib.__assign({ marker: marker }, options), convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _b.sent(); + wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, segment: tslib.__assign(tslib.__assign({}, response.segment), { blobItems: response.segment.blobItems.map(function (blobItemInteral) { + var blobItem = tslib.__assign(tslib.__assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return [2 /*return*/, wrappedResponse]; + case 3: + e_12 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_12.message + }); + throw e_12; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * listBlobHierarchySegment returns a single segment of blobs starting from * the specified Marker. Use an empty Marker to start enumeration from the @@ -84616,27 +87777,40 @@ class ContainerClient extends StorageClient { * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to Container List Blob Hierarchy Segment operation. 
*/ - async listBlobHierarchySegment(delimiter, marker, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); - try { - const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); - return blobItem; - }) }) }); - return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + ContainerClient.prototype.listBlobHierarchySegment = function (delimiter, marker, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, response, wrappedResponse, e_13; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("ContainerClient-listBlobHierarchySegment", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.containerContext.listBlobHierarchySegment(delimiter, tslib.__assign(tslib.__assign({ marker: marker }, options), convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _b.sent(); + wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, segment: tslib.__assign(tslib.__assign({}, response.segment), { blobItems: response.segment.blobItems.map(function (blobItemInteral) { + var blobItem = tslib.__assign(tslib.__assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return [2 /*return*/, wrappedResponse]; + case 3: + e_13 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_13.message + }); + throw e_13; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse * @@ -84649,42 +87823,81 @@ class ContainerClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. 
*/ - listSegments(marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { - let listBlobsFlatSegmentResponse; - if (!!marker || marker === undefined) { - do { - listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker, options)); - marker = listBlobsFlatSegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); - } while (marker); - } + ContainerClient.prototype.listSegments = function (marker, options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function listSegments_1() { + var listBlobsFlatSegmentResponse; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!(!!marker || marker === undefined)) return [3 /*break*/, 7]; + _a.label = 1; + case 1: return [4 /*yield*/, tslib.__await(this.listBlobFlatSegment(marker, options))]; + case 2: + listBlobsFlatSegmentResponse = _a.sent(); + marker = listBlobsFlatSegmentResponse.continuationToken; + return [4 /*yield*/, tslib.__await(listBlobsFlatSegmentResponse)]; + case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])]; + case 4: return [4 /*yield*/, _a.sent()]; + case 5: + _a.sent(); + _a.label = 6; + case 6: + if (marker) return [3 /*break*/, 1]; + _a.label = 7; + case 7: return [2 /*return*/]; + } + }); }); - } + }; /** * Returns an AsyncIterableIterator of {@link BlobItem} objects * * @param options - Options to list blobs operation. */ - listItems(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItems_1() { - var e_1, _a; - let marker; - try { - for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const listBlobsFlatSegmentResponse = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + ContainerClient.prototype.listItems = function (options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function listItems_1() { + var marker, _a, _b, listBlobsFlatSegmentResponse, e_14_1; + var e_14, _c; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _d.trys.push([0, 7, 8, 13]); + _a = tslib.__asyncValues(this.listSegments(marker, options)); + _d.label = 1; + case 1: return [4 /*yield*/, tslib.__await(_a.next())]; + case 2: + if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6]; + listBlobsFlatSegmentResponse = _b.value; + return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems)))]; + case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])]; + case 4: + _d.sent(); + _d.label = 5; + case 5: return [3 /*break*/, 1]; + case 6: return [3 /*break*/, 13]; + case 7: + e_14_1 = _d.sent(); + e_14 = { error: e_14_1 }; + return [3 /*break*/, 13]; + case 8: + _d.trys.push([8, , 11, 12]); + if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10]; + return [4 /*yield*/, tslib.__await(_c.call(_a))]; + case 9: + _d.sent(); + _d.label = 10; + case 10: return [3 /*break*/, 12]; + case 11: + if (e_14) throw e_14.error; + return [7 /*endfinally*/]; + case 12: return [7 /*endfinally*/]; + case 13: return [2 /*return*/]; } - finally { if (e_1) throw 
e_1.error; } - } + }); }); - } + }; /** * Returns an async iterable iterator to list all the blobs * under the specified account. @@ -84755,8 +87968,11 @@ class ContainerClient extends StorageClient { * @param options - Options to list blobs. * @returns An asyncIterableIterator that supports paging. */ - listBlobsFlat(options = {}) { - const include = []; + ContainerClient.prototype.listBlobsFlat = function (options) { + var _a; + var _this = this; + if (options === void 0) { options = {}; } + var include = []; if (options.includeCopy) { include.push("copy"); } @@ -84781,30 +87997,32 @@ class ContainerClient extends StorageClient { if (options.prefix === "") { options.prefix = undefined; } - const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include: include } : {})); + var updatedOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? { include: include } : {})); // AsyncIterableIterator to iterate over blobs - const iter = this.listItems(updatedOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); + var iter = this.listItems(updatedOptions); + return _a = { + /** + * The next method, part of the iteration protocol + */ + next: function () { + return iter.next(); + } }, /** * The connection to the async iterator, part of the iteration protocol */ - [Symbol.asyncIterator]() { + _a[Symbol.asyncIterator] = function () { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ - byPage: (settings = {}) => { - return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); - } - }; - } + _a.byPage = function (settings) { + if (settings === void 0) { settings = {}; } + return _this.listSegments(settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + _a; + }; /** * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse * @@ -84818,51 +88036,106 @@ class ContainerClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. 
*/ - listHierarchySegments(delimiter, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1() { - let listBlobsHierarchySegmentResponse; - if (!!marker || marker === undefined) { - do { - listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options)); - marker = listBlobsHierarchySegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); - } while (marker); - } + ContainerClient.prototype.listHierarchySegments = function (delimiter, marker, options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function listHierarchySegments_1() { + var listBlobsHierarchySegmentResponse; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!(!!marker || marker === undefined)) return [3 /*break*/, 7]; + _a.label = 1; + case 1: return [4 /*yield*/, tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options))]; + case 2: + listBlobsHierarchySegmentResponse = _a.sent(); + marker = listBlobsHierarchySegmentResponse.continuationToken; + return [4 /*yield*/, tslib.__await(listBlobsHierarchySegmentResponse)]; + case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])]; + case 4: return [4 /*yield*/, _a.sent()]; + case 5: + _a.sent(); + _a.label = 6; + case 6: + if (marker) return [3 /*break*/, 1]; + _a.label = 7; + case 7: return [2 /*return*/]; + } + }); }); - } + }; /** * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. * * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. */ - listItemsByHierarchy(delimiter, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1() { - var e_2, _a; - let marker; - try { - for (var _b = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const listBlobsHierarchySegmentResponse = _c.value; - const segment = listBlobsHierarchySegmentResponse.segment; - if (segment.blobPrefixes) { - for (const prefix of segment.blobPrefixes) { - yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix)); - } - } - for (const blob of segment.blobItems) { - yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); - } - } - } - catch (e_2_1) { e_2 = { error: e_2_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + ContainerClient.prototype.listItemsByHierarchy = function (delimiter, options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function listItemsByHierarchy_1() { + var marker, _a, _b, listBlobsHierarchySegmentResponse, segment, _i, _c, prefix, _d, _e, blob, e_15_1; + var e_15, _f; + return tslib.__generator(this, function (_g) { + switch (_g.label) { + case 0: + _g.trys.push([0, 14, 15, 20]); + _a = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)); + _g.label = 1; + case 1: return [4 /*yield*/, tslib.__await(_a.next())]; + case 2: + if (!(_b = _g.sent(), !_b.done)) return [3 /*break*/, 13]; + listBlobsHierarchySegmentResponse = _b.value; + segment = listBlobsHierarchySegmentResponse.segment; + if (!segment.blobPrefixes) return [3 /*break*/, 7]; + _i = 0, _c = segment.blobPrefixes; + _g.label = 3; + case 3: + if (!(_i < _c.length)) return [3 
/*break*/, 7]; + prefix = _c[_i]; + return [4 /*yield*/, tslib.__await(tslib.__assign({ kind: "prefix" }, prefix))]; + case 4: return [4 /*yield*/, _g.sent()]; + case 5: + _g.sent(); + _g.label = 6; + case 6: + _i++; + return [3 /*break*/, 3]; + case 7: + _d = 0, _e = segment.blobItems; + _g.label = 8; + case 8: + if (!(_d < _e.length)) return [3 /*break*/, 12]; + blob = _e[_d]; + return [4 /*yield*/, tslib.__await(tslib.__assign({ kind: "blob" }, blob))]; + case 9: return [4 /*yield*/, _g.sent()]; + case 10: + _g.sent(); + _g.label = 11; + case 11: + _d++; + return [3 /*break*/, 8]; + case 12: return [3 /*break*/, 1]; + case 13: return [3 /*break*/, 20]; + case 14: + e_15_1 = _g.sent(); + e_15 = { error: e_15_1 }; + return [3 /*break*/, 20]; + case 15: + _g.trys.push([15, , 18, 19]); + if (!(_b && !_b.done && (_f = _a.return))) return [3 /*break*/, 17]; + return [4 /*yield*/, tslib.__await(_f.call(_a))]; + case 16: + _g.sent(); + _g.label = 17; + case 17: return [3 /*break*/, 19]; + case 18: + if (e_15) throw e_15.error; + return [7 /*endfinally*/]; + case 19: return [7 /*endfinally*/]; + case 20: return [2 /*return*/]; } - finally { if (e_2) throw e_2.error; } - } + }); }); - } + }; /** * Returns an async iterable iterator to list all the blobs by hierarchy. * under the specified account. @@ -84939,11 +88212,14 @@ class ContainerClient extends StorageClient { * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. */ - listBlobsByHierarchy(delimiter, options = {}) { + ContainerClient.prototype.listBlobsByHierarchy = function (delimiter, options) { + var _a; + var _this = this; + if (options === void 0) { options = {}; } if (delimiter === "") { throw new RangeError("delimiter should contain one or more characters"); } - const include = []; + var include = []; if (options.includeCopy) { include.push("copy"); } @@ -84968,39 +88244,45 @@ class ContainerClient extends StorageClient { if (options.prefix === "") { options.prefix = undefined; } - const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include: include } : {})); + var updatedOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? 
{ include: include } : {})); // AsyncIterableIterator to iterate over blob prefixes and blobs - const iter = this.listItemsByHierarchy(delimiter, updatedOptions); - return { - /** - * The next method, part of the iteration protocol - */ - async next() { - return iter.next(); + var iter = this.listItemsByHierarchy(delimiter, updatedOptions); + return _a = { + /** + * The next method, part of the iteration protocol + */ + next: function () { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, iter.next()]; + }); + }); + } }, /** * The connection to the async iterator, part of the iteration protocol */ - [Symbol.asyncIterator]() { + _a[Symbol.asyncIterator] = function () { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ - byPage: (settings = {}) => { - return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); - } - }; - } - getContainerNameFromUrl() { - let containerName; + _a.byPage = function (settings) { + if (settings === void 0) { settings = {}; } + return _this.listHierarchySegments(delimiter, settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + _a; + }; + ContainerClient.prototype.getContainerNameFromUrl = function () { + var containerName; try { // URL may look like the following // "https://myaccount.blob.core.windows.net/mycontainer?sasString"; // "https://myaccount.blob.core.windows.net/mycontainer"; // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` // http://localhost:10001/devstoreaccount1/containername - const parsedUrl = coreHttp.URLBuilder.parse(this.url); + var parsedUrl = coreHttp.URLBuilder.parse(this.url); if (parsedUrl.getHost().split(".")[1] === "blob") { // "https://myaccount.blob.core.windows.net/containername". // "https://customdomain.com/containername". @@ -85028,7 +88310,7 @@ class ContainerClient extends StorageClient { catch (error) { throw new Error("Unable to extract containerName with provided information."); } - } + }; /** * Only available for ContainerClient constructed with a shared key credential. * @@ -85040,15 +88322,16 @@ class ContainerClient extends StorageClient { * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - generateSasUrl(options) { - return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { + ContainerClient.prototype.generateSasUrl = function (options) { + var _this = this; + return new Promise(function (resolve) { + if (!(_this.credential instanceof StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); + var sas = generateBlobSASQueryParameters(tslib.__assign({ containerName: _this._containerName }, options), _this.credential).toString(); + resolve(appendToURLQuery(_this.url, sas)); }); - } + }; /** * Creates a BlobBatchClient object to conduct batch operations. * @@ -85056,10 +88339,11 @@ class ContainerClient extends StorageClient { * * @returns A new BlobBatchClient object for this container. 
*/ - getBlobBatchClient() { + ContainerClient.prototype.getBlobBatchClient = function () { return new BlobBatchClient(this.url, this.pipeline); - } -} + }; + return ContainerClient; +}(StorageClient)); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -85072,8 +88356,8 @@ class ContainerClient extends StorageClient { * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but * the order of the permissions is particular and this class guarantees correctness. */ -class AccountSASPermissions { - constructor() { +var AccountSASPermissions = /** @class */ (function () { + function AccountSASPermissions() { /** * Permission to read resources and list queues and tables granted. */ @@ -85124,9 +88408,10 @@ class AccountSASPermissions { * * @param permissions - */ - static parse(permissions) { - const accountSASPermissions = new AccountSASPermissions(); - for (const c of permissions) { + AccountSASPermissions.parse = function (permissions) { + var accountSASPermissions = new AccountSASPermissions(); + for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) { + var c = permissions_1[_i]; switch (c) { case "r": accountSASPermissions.read = true; @@ -85162,19 +88447,19 @@ class AccountSASPermissions { accountSASPermissions.filter = true; break; default: - throw new RangeError(`Invalid permission character: ${c}`); + throw new RangeError("Invalid permission character: " + c); } } return accountSASPermissions; - } + }; /** * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it * and boolean values for them. * * @param permissionLike - */ - static from(permissionLike) { - const accountSASPermissions = new AccountSASPermissions(); + AccountSASPermissions.from = function (permissionLike) { + var accountSASPermissions = new AccountSASPermissions(); if (permissionLike.read) { accountSASPermissions.read = true; } @@ -85209,7 +88494,7 @@ class AccountSASPermissions { accountSASPermissions.process = true; } return accountSASPermissions; - } + }; /** * Produces the SAS permissions string for an Azure Storage account. * Call this method to set AccountSASSignatureValues Permissions field. @@ -85220,11 +88505,11 @@ class AccountSASPermissions { * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas * */ - toString() { + AccountSASPermissions.prototype.toString = function () { // The order of the characters should be as specified here to ensure correctness: // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas // Use a string array instead of string concatenating += operator for performance - const permissions = []; + var permissions = []; if (this.read) { permissions.push("r"); } @@ -85259,8 +88544,9 @@ class AccountSASPermissions { permissions.push("p"); } return permissions.join(""); - } -} + }; + return AccountSASPermissions; +}()); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -85273,8 +88559,8 @@ class AccountSASPermissions { * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but * the order of the resources is particular and this class guarantees correctness. */ -class AccountSASResourceTypes { - constructor() { +var AccountSASResourceTypes = /** @class */ (function () { + function AccountSASResourceTypes() { /** * Permission to access service level APIs granted. 
*/ @@ -85294,9 +88580,10 @@ class AccountSASResourceTypes { * * @param resourceTypes - */ - static parse(resourceTypes) { - const accountSASResourceTypes = new AccountSASResourceTypes(); - for (const c of resourceTypes) { + AccountSASResourceTypes.parse = function (resourceTypes) { + var accountSASResourceTypes = new AccountSASResourceTypes(); + for (var _i = 0, resourceTypes_1 = resourceTypes; _i < resourceTypes_1.length; _i++) { + var c = resourceTypes_1[_i]; switch (c) { case "s": accountSASResourceTypes.service = true; @@ -85308,19 +88595,19 @@ class AccountSASResourceTypes { accountSASResourceTypes.object = true; break; default: - throw new RangeError(`Invalid resource type: ${c}`); + throw new RangeError("Invalid resource type: " + c); } } return accountSASResourceTypes; - } + }; /** * Converts the given resource types to a string. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas * */ - toString() { - const resourceTypes = []; + AccountSASResourceTypes.prototype.toString = function () { + var resourceTypes = []; if (this.service) { resourceTypes.push("s"); } @@ -85331,8 +88618,9 @@ class AccountSASResourceTypes { resourceTypes.push("o"); } return resourceTypes.join(""); - } -} + }; + return AccountSASResourceTypes; +}()); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -85345,8 +88633,8 @@ class AccountSASResourceTypes { * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but * the order of the services is particular and this class guarantees correctness. */ -class AccountSASServices { - constructor() { +var AccountSASServices = /** @class */ (function () { + function AccountSASServices() { /** * Permission to access blob resources granted. */ @@ -85370,9 +88658,10 @@ class AccountSASServices { * * @param services - */ - static parse(services) { - const accountSASServices = new AccountSASServices(); - for (const c of services) { + AccountSASServices.parse = function (services) { + var accountSASServices = new AccountSASServices(); + for (var _i = 0, services_1 = services; _i < services_1.length; _i++) { + var c = services_1[_i]; switch (c) { case "b": accountSASServices.blob = true; @@ -85387,17 +88676,17 @@ class AccountSASServices { accountSASServices.table = true; break; default: - throw new RangeError(`Invalid service character: ${c}`); + throw new RangeError("Invalid service character: " + c); } } return accountSASServices; - } + }; /** * Converts the given services to a string. * */ - toString() { - const services = []; + AccountSASServices.prototype.toString = function () { + var services = []; if (this.blob) { services.push("b"); } @@ -85411,8 +88700,9 @@ class AccountSASServices { services.push("f"); } return services.join(""); - } -} + }; + return AccountSASServices; +}()); // Copyright (c) Microsoft Corporation. /** @@ -85427,7 +88717,7 @@ class AccountSASServices { * @param sharedKeyCredential - */ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { - const version = accountSASSignatureValues.version + var version = accountSASSignatureValues.version ? 
accountSASSignatureValues.version : SERVICE_VERSION; if (accountSASSignatureValues.permissions && @@ -85445,10 +88735,10 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC version < "2019-12-12") { throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); } - const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); - const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); - const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); - const stringToSign = [ + var parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + var parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + var parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + var stringToSign = [ sharedKeyCredential.accountName, parsedPermissions, parsedServices, @@ -85462,7 +88752,7 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC version, "" // Account SAS requires an additional newline character ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + var signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange); } @@ -85470,9 +88760,11 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you * to manipulate blob containers. */ -class BlobServiceClient extends StorageClient { - constructor(url, credentialOrPipeline, options) { - let pipeline; +var BlobServiceClient = /** @class */ (function (_super) { + tslib.__extends(BlobServiceClient, _super); + function BlobServiceClient(url, credentialOrPipeline, options) { + var _this = this; + var pipeline; if (credentialOrPipeline instanceof Pipeline) { pipeline = credentialOrPipeline; } @@ -85485,8 +88777,9 @@ class BlobServiceClient extends StorageClient { // The second parameter is undefined. Use anonymous credential pipeline = newPipeline(new AnonymousCredential(), options); } - super(url, pipeline); - this.serviceContext = new Service(this.storageClientContext); + _this = _super.call(this, url, pipeline) || this; + _this.serviceContext = new Service(_this.storageClientContext); + return _this; } /** * @@ -85500,25 +88793,25 @@ class BlobServiceClient extends StorageClient { * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` * @param options - Optional. Options to configure the HTTP pipeline. 
*/ - static fromConnectionString(connectionString, options) { + BlobServiceClient.fromConnectionString = function (connectionString, options) { options = options || {}; - const extractedCreds = extractConnectionStringParts(connectionString); + var extractedCreds = extractConnectionStringParts(connectionString); if (extractedCreds.kind === "AccountConnString") { { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - const pipeline = newPipeline(sharedKeyCredential, options); + var pipeline = newPipeline(sharedKeyCredential, options); return new BlobServiceClient(extractedCreds.url, pipeline); } } else if (extractedCreds.kind === "SASConnString") { - const pipeline = newPipeline(new AnonymousCredential(), options); + var pipeline = newPipeline(new AnonymousCredential(), options); return new BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } - } + }; /** * Creates a {@link ContainerClient} object * @@ -85531,9 +88824,9 @@ class BlobServiceClient extends StorageClient { * const containerClient = blobServiceClient.getContainerClient(""); * ``` */ - getContainerClient(containerName) { + BlobServiceClient.prototype.getContainerClient = function (containerName) { return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); - } + }; /** * Create a Blob container. * @@ -85541,27 +88834,40 @@ class BlobServiceClient extends StorageClient { * @param options - Options to configure Container Create operation. * @returns Container creation response and the corresponding container client. */ - async createContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); - try { - const containerClient = this.getContainerClient(containerName); - const containerCreateResponse = await containerClient.create(updatedOptions); - return { - containerClient, - containerCreateResponse - }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.createContainer = function (containerName, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, containerClient, containerCreateResponse, e_1; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-createContainer", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + containerClient = this.getContainerClient(containerName); + return [4 /*yield*/, containerClient.create(updatedOptions)]; + case 2: + containerCreateResponse = _b.sent(); + return [2 /*return*/, { + containerClient: containerClient, + containerCreateResponse: containerCreateResponse + }]; + case 3: + e_1 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_1.message + }); + throw e_1; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Deletes a Blob container. 
* @@ -85569,23 +88875,35 @@ class BlobServiceClient extends StorageClient { * @param options - Options to configure Container Delete operation. * @returns Container deletion response. */ - async deleteContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); - try { - const containerClient = this.getContainerClient(containerName); - return await containerClient.delete(updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.deleteContainer = function (containerName, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, containerClient, e_2; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-deleteContainer", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + containerClient = this.getContainerClient(containerName); + return [4 /*yield*/, containerClient.delete(updatedOptions)]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_2 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_2.message + }); + throw e_2; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Restore a previously deleted Blob container. * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. @@ -85595,27 +88913,39 @@ class BlobServiceClient extends StorageClient { * @param options - Options to configure Container Restore operation. * @returns Container deletion response. */ - async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); - try { - const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); - // Hack to access a protected member. 
- const containerContext = new Container(containerClient["storageClientContext"]); - const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName, - deletedContainerVersion }, updatedOptions)); - return { containerClient, containerUndeleteResponse }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.undeleteContainer = function (deletedContainerName, deletedContainerVersion, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, containerClient, containerContext, containerUndeleteResponse, e_3; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-undeleteContainer", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); + containerContext = new Container(containerClient["storageClientContext"]); + return [4 /*yield*/, containerContext.restore(tslib.__assign({ deletedContainerName: deletedContainerName, + deletedContainerVersion: deletedContainerVersion }, updatedOptions))]; + case 2: + containerUndeleteResponse = _b.sent(); + return [2 /*return*/, { containerClient: containerClient, containerUndeleteResponse: containerUndeleteResponse }]; + case 3: + e_3 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_3.message + }); + throw e_3; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Rename an existing Blob Container. * @@ -85624,27 +88954,39 @@ class BlobServiceClient extends StorageClient { * @param options - Options to configure Container Rename operation. */ // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. - async renameContainer(sourceContainerName, destinationContainerName, options = {}) { + BlobServiceClient.prototype.renameContainer = function (sourceContainerName, destinationContainerName, options) { var _a; - const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); - try { - const containerClient = this.getContainerClient(destinationContainerName); - // Hack to access a protected member. - const containerContext = new Container(containerClient["storageClientContext"]); - const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? 
void 0 : _a.leaseId })); - return { containerClient, containerRenameResponse }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _b, span, updatedOptions, containerClient, containerContext, containerRenameResponse, e_4; + return tslib.__generator(this, function (_c) { + switch (_c.label) { + case 0: + _b = createSpan("BlobServiceClient-renameContainer", options), span = _b.span, updatedOptions = _b.updatedOptions; + _c.label = 1; + case 1: + _c.trys.push([1, 3, 4, 5]); + containerClient = this.getContainerClient(destinationContainerName); + containerContext = new Container(containerClient["storageClientContext"]); + return [4 /*yield*/, containerContext.rename(sourceContainerName, tslib.__assign(tslib.__assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId }))]; + case 2: + containerRenameResponse = _c.sent(); + return [2 /*return*/, { containerClient: containerClient, containerRenameResponse: containerRenameResponse }]; + case 3: + e_4 = _c.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_4.message + }); + throw e_4; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Gets the properties of a storage account’s Blob service, including properties * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. @@ -85653,22 +88995,34 @@ class BlobServiceClient extends StorageClient { * @param options - Options to the Service Get Properties operation. * @returns Response data for the Service Get Properties operation. */ - async getProperties(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); - try { - return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.getProperties = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_5; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-getProperties", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.serviceContext.getProperties(tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_5 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_5.message + }); + throw e_5; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Sets properties for a storage account’s Blob service endpoint, including properties * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. @@ -85678,22 +89032,34 @@ class BlobServiceClient extends StorageClient { * @param options - Options to the Service Set Properties operation. 
* @returns Response data for the Service Set Properties operation. */ - async setProperties(properties, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); - try { - return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.setProperties = function (properties, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_6; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-setProperties", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.serviceContext.setProperties(properties, tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_6 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_6.message + }); + throw e_6; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Retrieves statistics related to replication for the Blob service. It is only * available on the secondary location endpoint when read-access geo-redundant @@ -85703,22 +89069,34 @@ class BlobServiceClient extends StorageClient { * @param options - Options to the Service Get Statistics operation. * @returns Response data for the Service Get Statistics operation. */ - async getStatistics(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); - try { - return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.getStatistics = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_7; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-getStatistics", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.serviceContext.getStatistics(tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_7 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_7.message + }); + throw e_7; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * The Get Account Information operation returns the sku name and account kind * for the specified account. @@ -85729,22 +89107,34 @@ class BlobServiceClient extends StorageClient { * @param options - Options to the Service Get Account Info operation. * @returns Response data for the Service Get Account Info operation. 
*/ - async getAccountInfo(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); - try { - return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.getAccountInfo = function (options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_8; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-getAccountInfo", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.serviceContext.getAccountInfo(tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_8 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_8.message + }); + throw e_8; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns a list of the containers under the specified account. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 @@ -85759,22 +89149,34 @@ class BlobServiceClient extends StorageClient { * @param options - Options to the Service List Container Segment operation. * @returns Response data for the Service List Container Segment operation. */ - async listContainersSegment(marker, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); - try { - return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.listContainersSegment = function (marker, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, e_9; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-listContainersSegment", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.serviceContext.listContainersSegment(tslib.__assign(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal, marker: marker }, options), { include: typeof options.include === "string" ? 
[options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: return [2 /*return*/, _b.sent()]; + case 3: + e_9 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_9.message + }); + throw e_9; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags * match a given search expression. Filter blobs searches across all containers within a @@ -85793,31 +89195,44 @@ class BlobServiceClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); - try { - const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { - var _a; - let tagValue = ""; - if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { - tagValue = blob.tags.blobTagSet[0].value; - } - return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); - }) }); - return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.findBlobsByTagsSegment = function (tagFilterSqlExpression, marker, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, response, wrappedResponse, e_10; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-findBlobsByTagsSegment", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.serviceContext.filterBlobs(tslib.__assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker: marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _b.sent(); + wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, blobs: response.blobs.map(function (blob) { + var _a; + var tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return tslib.__assign(tslib.__assign({}, blob), { tags: toTags(blob.tags), tagValue: tagValue }); + }) }); + return [2 /*return*/, wrappedResponse]; + case 3: + e_10 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_10.message + }); + throw e_10; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. * @@ -85834,19 +89249,33 @@ class BlobServiceClient extends StorageClient { * items. The marker value is opaque to the client. 
* @param options - Options to find blobs by tags. */ - findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { - let response; - if (!!marker || marker === undefined) { - do { - response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); - response.blobs = response.blobs || []; - marker = response.continuationToken; - yield yield tslib.__await(response); - } while (marker); - } + BlobServiceClient.prototype.findBlobsByTagsSegments = function (tagFilterSqlExpression, marker, options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function findBlobsByTagsSegments_1() { + var response; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!(!!marker || marker === undefined)) return [3 /*break*/, 6]; + _a.label = 1; + case 1: return [4 /*yield*/, tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options))]; + case 2: + response = _a.sent(); + response.blobs = response.blobs || []; + marker = response.continuationToken; + return [4 /*yield*/, tslib.__await(response)]; + case 3: return [4 /*yield*/, _a.sent()]; + case 4: + _a.sent(); + _a.label = 5; + case 5: + if (marker) return [3 /*break*/, 1]; + _a.label = 6; + case 6: return [2 /*return*/]; + } + }); }); - } + }; /** * Returns an AsyncIterableIterator for blobs. * @@ -85856,25 +89285,49 @@ class BlobServiceClient extends StorageClient { * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. */ - findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { - var e_1, _a; - let marker; - try { - for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const segment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + BlobServiceClient.prototype.findBlobsByTagsItems = function (tagFilterSqlExpression, options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function findBlobsByTagsItems_1() { + var marker, _a, _b, segment, e_11_1; + var e_11, _c; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _d.trys.push([0, 7, 8, 13]); + _a = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)); + _d.label = 1; + case 1: return [4 /*yield*/, tslib.__await(_a.next())]; + case 2: + if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6]; + segment = _b.value; + return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs)))]; + case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])]; + case 4: + _d.sent(); + _d.label = 5; + case 5: return [3 /*break*/, 1]; + case 6: return [3 /*break*/, 13]; + case 7: + e_11_1 = _d.sent(); + e_11 = { error: e_11_1 }; + return [3 /*break*/, 13]; + case 8: + _d.trys.push([8, , 11, 12]); + if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10]; + return [4 /*yield*/, tslib.__await(_c.call(_a))]; + case 9: + _d.sent(); + _d.label = 10; + 
case 10: return [3 /*break*/, 12]; + case 11: + if (e_11) throw e_11.error; + return [7 /*endfinally*/]; + case 12: return [7 /*endfinally*/]; + case 13: return [2 /*return*/]; } - finally { if (e_1) throw e_1.error; } - } + }); }); - } + }; /** * Returns an async iterable iterator to find all blobs with specified tag * under the specified account. @@ -85954,31 +89407,36 @@ class BlobServiceClient extends StorageClient { * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to find blobs by tags. */ - findBlobsByTags(tagFilterSqlExpression, options = {}) { + BlobServiceClient.prototype.findBlobsByTags = function (tagFilterSqlExpression, options) { + var _a; + var _this = this; + if (options === void 0) { options = {}; } // AsyncIterableIterator to iterate over blobs - const listSegmentOptions = Object.assign({}, options); - const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); + var listSegmentOptions = tslib.__assign({}, options); + var iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return _a = { + /** + * The next method, part of the iteration protocol + */ + next: function () { + return iter.next(); + } }, /** * The connection to the async iterator, part of the iteration protocol */ - [Symbol.asyncIterator]() { + _a[Symbol.asyncIterator] = function () { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ - byPage: (settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - } - }; - } + _a.byPage = function (settings) { + if (settings === void 0) { settings = {}; } + return _this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + _a; + }; /** * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses * @@ -85991,44 +89449,83 @@ class BlobServiceClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list containers operation. 
*/ - listSegments(marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { - let listContainersSegmentResponse; - if (!!marker || marker === undefined) { - do { - listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker, options)); - listContainersSegmentResponse.containerItems = - listContainersSegmentResponse.containerItems || []; - marker = listContainersSegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); - } while (marker); - } + BlobServiceClient.prototype.listSegments = function (marker, options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function listSegments_1() { + var listContainersSegmentResponse; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!(!!marker || marker === undefined)) return [3 /*break*/, 7]; + _a.label = 1; + case 1: return [4 /*yield*/, tslib.__await(this.listContainersSegment(marker, options))]; + case 2: + listContainersSegmentResponse = _a.sent(); + listContainersSegmentResponse.containerItems = + listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + return [4 /*yield*/, tslib.__await(listContainersSegmentResponse)]; + case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])]; + case 4: return [4 /*yield*/, _a.sent()]; + case 5: + _a.sent(); + _a.label = 6; + case 6: + if (marker) return [3 /*break*/, 1]; + _a.label = 7; + case 7: return [2 /*return*/]; + } + }); }); - } + }; /** * Returns an AsyncIterableIterator for Container Items * * @param options - Options to list containers operation. */ - listItems(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItems_1() { - var e_2, _a; - let marker; - try { - for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const segment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); + BlobServiceClient.prototype.listItems = function (options) { + if (options === void 0) { options = {}; } + return tslib.__asyncGenerator(this, arguments, function listItems_1() { + var marker, _a, _b, segment, e_12_1; + var e_12, _c; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + _d.trys.push([0, 7, 8, 13]); + _a = tslib.__asyncValues(this.listSegments(marker, options)); + _d.label = 1; + case 1: return [4 /*yield*/, tslib.__await(_a.next())]; + case 2: + if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6]; + segment = _b.value; + return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems)))]; + case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])]; + case 4: + _d.sent(); + _d.label = 5; + case 5: return [3 /*break*/, 1]; + case 6: return [3 /*break*/, 13]; + case 7: + e_12_1 = _d.sent(); + e_12 = { error: e_12_1 }; + return [3 /*break*/, 13]; + case 8: + _d.trys.push([8, , 11, 12]); + if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10]; + return [4 /*yield*/, tslib.__await(_c.call(_a))]; + case 9: + _d.sent(); + _d.label = 10; + case 10: return [3 /*break*/, 12]; + case 11: + if (e_12) throw e_12.error; + return [7 /*endfinally*/]; + case 12: return [7 /*endfinally*/]; + case 13: return [2 /*return*/]; } - } - catch (e_2_1) { e_2 = { error: e_2_1 }; } - finally { - try 
{ - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); - } - finally { if (e_2) throw e_2.error; } - } + }); }); - } + }; /** * Returns an async iterable iterator to list all the containers * under the specified account. @@ -86103,11 +89600,14 @@ class BlobServiceClient extends StorageClient { * @param options - Options to list containers. * @returns An asyncIterableIterator that supports paging. */ - listContainers(options = {}) { + BlobServiceClient.prototype.listContainers = function (options) { + var _a; + var _this = this; + if (options === void 0) { options = {}; } if (options.prefix === "") { options.prefix = undefined; } - const include = []; + var include = []; if (options.includeDeleted) { include.push("deleted"); } @@ -86115,29 +89615,31 @@ class BlobServiceClient extends StorageClient { include.push("metadata"); } // AsyncIterableIterator to iterate over containers - const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); - const iter = this.listItems(listSegmentOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); + var listSegmentOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? { include: include } : {})); + var iter = this.listItems(listSegmentOptions); + return _a = { + /** + * The next method, part of the iteration protocol + */ + next: function () { + return iter.next(); + } }, /** * The connection to the async iterator, part of the iteration protocol */ - [Symbol.asyncIterator]() { + _a[Symbol.asyncIterator] = function () { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ - byPage: (settings = {}) => { - return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - } - }; - } + _a.byPage = function (settings) { + if (settings === void 0) { settings = {}; } + return _this.listSegments(settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + _a; + }; /** * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). * @@ -86149,36 +89651,49 @@ class BlobServiceClient extends StorageClient { * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time * @param expiresOn - The end time for the user delegation SAS. 
Must be within 7 days of the current time */ - async getUserDelegationKey(startsOn, expiresOn, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); - try { - const response = await this.serviceContext.getUserDelegationKey({ - startsOn: truncatedISO8061Date(startsOn, false), - expiresOn: truncatedISO8061Date(expiresOn, false) - }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - const userDelegationKey = { - signedObjectId: response.signedObjectId, - signedTenantId: response.signedTenantId, - signedStartsOn: new Date(response.signedStartsOn), - signedExpiresOn: new Date(response.signedExpiresOn), - signedService: response.signedService, - signedVersion: response.signedVersion, - value: response.value - }; - const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); - return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + BlobServiceClient.prototype.getUserDelegationKey = function (startsOn, expiresOn, options) { + if (options === void 0) { options = {}; } + return tslib.__awaiter(this, void 0, void 0, function () { + var _a, span, updatedOptions, response, userDelegationKey, res, e_13; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + _a = createSpan("BlobServiceClient-getUserDelegationKey", options), span = _a.span, updatedOptions = _a.updatedOptions; + _b.label = 1; + case 1: + _b.trys.push([1, 3, 4, 5]); + return [4 /*yield*/, this.serviceContext.getUserDelegationKey({ + startsOn: truncatedISO8061Date(startsOn, false), + expiresOn: truncatedISO8061Date(expiresOn, false) + }, tslib.__assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)))]; + case 2: + response = _b.sent(); + userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value + }; + res = tslib.__assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + return [2 /*return*/, res]; + case 3: + e_13 = _b.sent(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e_13.message + }); + throw e_13; + case 4: + span.end(); + return [7 /*endfinally*/]; + case 5: return [2 /*return*/]; + } }); - throw e; - } - finally { - span.end(); - } - } + }); + }; /** * Creates a BlobBatchClient object to conduct batch operations. * @@ -86186,9 +89701,9 @@ class BlobServiceClient extends StorageClient { * * @returns A new BlobBatchClient object for this service. */ - getBlobBatchClient() { + BlobServiceClient.prototype.getBlobBatchClient = function () { return new BlobBatchClient(this.url, this.pipeline); - } + }; /** * Only available for BlobServiceClient constructed with a shared key credential. * @@ -86203,20 +89718,24 @@ class BlobServiceClient extends StorageClient { * @param options - Optional parameters. 
* @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + BlobServiceClient.prototype.generateAccountSasUrl = function (expiresOn, permissions, resourceTypes, options) { + if (permissions === void 0) { permissions = AccountSASPermissions.parse("r"); } + if (resourceTypes === void 0) { resourceTypes = "sco"; } + if (options === void 0) { options = {}; } if (!(this.credential instanceof StorageSharedKeyCredential)) { throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); } if (expiresOn === undefined) { - const now = new Date(); + var now = new Date(); expiresOn = new Date(now.getTime() + 3600 * 1000); } - const sas = generateAccountSASQueryParameters(Object.assign({ permissions, - expiresOn, - resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); + var sas = generateAccountSASQueryParameters(tslib.__assign({ permissions: permissions, + expiresOn: expiresOn, + resourceTypes: resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); return appendToURLQuery(this.url, sas); - } -} + }; + return BlobServiceClient; +}(StorageClient)); Object.defineProperty(exports, 'BaseRequestPolicy', { enumerable: true, @@ -87515,8 +91034,9 @@ module.exports = require("url"); /***/ }), /* 836 */, /* 837 */, -/* 838 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/* 838 */, +/* 839 */ +/***/ (function(__unusedmodule, exports) { "use strict"; @@ -87536,45 +91056,9 @@ module.exports = require("url"); * limitations under the License. */ Object.defineProperty(exports, "__esModule", { value: true }); -exports.deleteBaggage = exports.setBaggage = exports.getBaggage = void 0; -var context_1 = __webpack_require__(132); -/** - * Baggage key - */ -var BAGGAGE_KEY = context_1.createContextKey('OpenTelemetry Baggage Key'); -/** - * Retrieve the current baggage from the given context - * - * @param {Context} Context that manage all context values - * @returns {Baggage} Extracted baggage from the context - */ -function getBaggage(context) { - return context.getValue(BAGGAGE_KEY) || undefined; -} -exports.getBaggage = getBaggage; -/** - * Store a baggage in the given context - * - * @param {Context} Context that manage all context values - * @param {Baggage} baggage that will be set in the actual context - */ -function setBaggage(context, baggage) { - return context.setValue(BAGGAGE_KEY, baggage); -} -exports.setBaggage = setBaggage; -/** - * Delete the baggage stored in the given context - * - * @param {Context} Context that manage all context values - */ -function deleteBaggage(context) { - return context.deleteValue(BAGGAGE_KEY); -} -exports.deleteBaggage = deleteBaggage; -//# sourceMappingURL=context-helpers.js.map +//# sourceMappingURL=TimedEvent.js.map /***/ }), -/* 839 */, /* 840 */, /* 841 */ /***/ (function(module, __unusedexports, __webpack_require__) { @@ -87875,7 +91359,30 @@ exports.hashFiles = hashFiles; /***/ }), /* 850 */, -/* 851 */, +/* 851 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=link_context.js.map + +/***/ }), /* 852 */ /***/ (function(module, __unusedexports, __webpack_require__) { @@ -88374,63 +91881,7 @@ WebIDLAlgorithm_1.idl_defineConst(AttrImpl.prototype, "specified", true); /* 869 */, /* 870 */, /* 871 */, -/* 872 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.baggageEntryMetadataFromString = exports.createBaggage = void 0; -var __1 = __webpack_require__(440); -var baggage_impl_1 = __webpack_require__(666); -var symbol_1 = __webpack_require__(561); -/** - * Create a new Baggage with optional entries - * - * @param entries An array of baggage entries the new baggage should contain - */ -function createBaggage(entries) { - if (entries === void 0) { entries = {}; } - return new baggage_impl_1.BaggageImpl(new Map(Object.entries(entries))); -} -exports.createBaggage = createBaggage; -/** - * Create a serializable BaggageEntryMetadata object from a string. - * - * @param str string metadata. Format is currently not defined by the spec and has no special meaning. 
- * - */ -function baggageEntryMetadataFromString(str) { - if (typeof str !== 'string') { - __1.diag.error("Cannot create baggage metadata from unknown type: " + typeof str); - str = ''; - } - return { - __TYPE__: symbol_1.baggageEntryMetadataSymbol, - toString: function () { - return str; - }, - }; -} -exports.baggageEntryMetadataFromString = baggageEntryMetadataFromString; -//# sourceMappingURL=utils.js.map - -/***/ }), +/* 872 */, /* 873 */ /***/ (function(__unusedmodule, exports, __webpack_require__) { @@ -89323,11 +92774,9 @@ module.exports = compare */ Object.defineProperty(exports, "__esModule", { value: true }); exports.TraceAPI = void 0; -var global_utils_1 = __webpack_require__(525); var ProxyTracerProvider_1 = __webpack_require__(394); var spancontext_utils_1 = __webpack_require__(453); -var context_utils_1 = __webpack_require__(969); -var diag_1 = __webpack_require__(118); +var global_utils_1 = __webpack_require__(525); var API_NAME = 'trace'; /** * Singleton object which represents the entry point to the OpenTelemetry Tracing API @@ -89336,13 +92785,7 @@ var TraceAPI = /** @class */ (function () { /** Empty private constructor prevents end users from constructing a new instance of the API */ function TraceAPI() { this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); - this.wrapSpanContext = spancontext_utils_1.wrapSpanContext; this.isSpanContextValid = spancontext_utils_1.isSpanContextValid; - this.deleteSpan = context_utils_1.deleteSpan; - this.getSpan = context_utils_1.getSpan; - this.getSpanContext = context_utils_1.getSpanContext; - this.setSpan = context_utils_1.setSpan; - this.setSpanContext = context_utils_1.setSpanContext; } /** Get the singleton instance of the Trace API */ TraceAPI.getInstance = function () { @@ -89352,16 +92795,12 @@ var TraceAPI = /** @class */ (function () { return this._instance; }; /** - * Set the current global tracer. - * - * @returns true if the tracer provider was successfully registered, else false + * Set the current global tracer. Returns the initialized global tracer provider */ TraceAPI.prototype.setGlobalTracerProvider = function (provider) { - var success = global_utils_1.registerGlobal(API_NAME, this._proxyTracerProvider, diag_1.DiagAPI.instance()); - if (success) { - this._proxyTracerProvider.setDelegate(provider); - } - return success; + this._proxyTracerProvider.setDelegate(provider); + global_utils_1.registerGlobal(API_NAME, this._proxyTracerProvider); + return this._proxyTracerProvider; }; /** * Returns the global tracer provider. @@ -89377,7 +92816,7 @@ var TraceAPI = /** @class */ (function () { }; /** Remove the global tracer provider */ TraceAPI.prototype.disable = function () { - global_utils_1.unregisterGlobal(API_NAME, diag_1.DiagAPI.instance()); + global_utils_1.unregisterGlobal(API_NAME); this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); }; return TraceAPI; @@ -89492,30 +92931,7 @@ module.exports = (versions, range, options) => { /***/ }), /* 878 */, /* 879 */, -/* 880 */ -/***/ (function(__unusedmodule, exports) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=types.js.map - -/***/ }), +/* 880 */, /* 881 */ /***/ (function(module, __unusedexports, __webpack_require__) { @@ -92451,7 +95867,7 @@ var __createBinding; ar[i] = from[i]; } } - return to.concat(ar || Array.prototype.slice.call(from)); + return to.concat(ar || from); }; __await = function (v) { @@ -96527,7 +99943,30 @@ WebIDLAlgorithm_1.idl_defineConst(NodeImpl.prototype, "DOCUMENT_POSITION_IMPLEME /***/ }), /* 936 */, /* 937 */, -/* 938 */, +/* 938 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Baggage.js.map + +/***/ }), /* 939 */ /***/ (function(module, __unusedexports, __webpack_require__) { @@ -97235,86 +100674,7 @@ var EmptySetIterator = /** @class */ (function () { //# sourceMappingURL=EmptySet.js.map /***/ }), -/* 969 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getSpanContext = exports.setSpanContext = exports.deleteSpan = exports.setSpan = exports.getSpan = void 0; -var context_1 = __webpack_require__(132); -var NonRecordingSpan_1 = __webpack_require__(437); -/** - * span key - */ -var SPAN_KEY = context_1.createContextKey('OpenTelemetry Context Key SPAN'); -/** - * Return the span if one exists - * - * @param context context to get span from - */ -function getSpan(context) { - return context.getValue(SPAN_KEY) || undefined; -} -exports.getSpan = getSpan; -/** - * Set the span on a context - * - * @param context context to use as parent - * @param span span to set active - */ -function setSpan(context, span) { - return context.setValue(SPAN_KEY, span); -} -exports.setSpan = setSpan; -/** - * Remove current span stored in the context - * - * @param context context to delete span from - */ -function deleteSpan(context) { - return context.deleteValue(SPAN_KEY); -} -exports.deleteSpan = deleteSpan; -/** - * Wrap span context in a NoopSpan and set as span in a new - * context - * - * @param context context to set active span on - * @param spanContext span context to be wrapped - */ -function setSpanContext(context, spanContext) { - return setSpan(context, new NonRecordingSpan_1.NonRecordingSpan(spanContext)); -} -exports.setSpanContext = setSpanContext; -/** - * Get the span context of the span if it exists. - * - * @param context context to get values from - */ -function getSpanContext(context) { - var _a; - return (_a = getSpan(context)) === null || _a === void 0 ? void 0 : _a.spanContext(); -} -exports.getSpanContext = getSpanContext; -//# sourceMappingURL=context-utils.js.map - -/***/ }), +/* 969 */, /* 970 */ /***/ (function(__unusedmodule, exports) { @@ -98449,7 +101809,7 @@ exports.exec = exec; Object.defineProperty(exports, "__esModule", { value: true }); exports.isCompatible = exports._makeCompatibilityCheck = void 0; var version_1 = __webpack_require__(133); -var re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; +var re = /^(\d+)\.(\d+)\.(\d+)(?:-(.*))?$/; /** * Create a function to test an API version to see if it is compatible with the provided ownVersion. 
* @@ -98478,14 +101838,7 @@ function _makeCompatibilityCheck(ownVersion) { major: +myVersionMatch[1], minor: +myVersionMatch[2], patch: +myVersionMatch[3], - prerelease: myVersionMatch[4], }; - // if ownVersion has a prerelease tag, versions must match exactly - if (ownVersionParsed.prerelease != null) { - return function isExactmatch(globalVersion) { - return globalVersion === ownVersion; - }; - } function _reject(v) { rejectedVersions.add(v); return false; @@ -98511,12 +101864,7 @@ function _makeCompatibilityCheck(ownVersion) { major: +globalVersionMatch[1], minor: +globalVersionMatch[2], patch: +globalVersionMatch[3], - prerelease: globalVersionMatch[4], }; - // if globalVersion has a prerelease tag, versions must match exactly - if (globalVersionParsed.prerelease != null) { - return _reject(globalVersion); - } // major versions must match if (ownVersionParsed.major !== globalVersionParsed.major) { return _reject(globalVersion); diff --git a/src/cache.ts b/src/cache.ts index d1571d947..fb97fb0b1 100644 --- a/src/cache.ts +++ b/src/cache.ts @@ -65,12 +65,11 @@ export async function restore(id: string) { core.debug(`primary key is ${primaryKey}`); core.saveState(STATE_CACHE_PRIMARY_KEY, primaryKey); if (primaryKey.endsWith('-')) { - core.warning( + throw new Error( `No file in ${process.cwd()} matched to [${ packageManager.pattern }], make sure you have checked out the target repository` ); - return; } const matchedKey = await cache.restoreCache(packageManager.path, primaryKey, [