diff --git a/build/index.js b/build/index.js
index 8b6c4973..213d5bc0 100644
--- a/build/index.js
+++ b/build/index.js
@@ -50,7 +50,16 @@ module.exports =
/******/ })
/************************************************************************/
/******/ ([
-/* 0 */,
+/* 0 */
+/***/ (function(module) {
+
+"use strict";
+
+// this exists so we can replace it during testing
+module.exports = setInterval
+
+
+/***/ }),
/* 1 */
/***/ (function(__unusedmodule, exports, __webpack_require__) {
@@ -2774,7 +2783,7 @@ Promise.spawn = function (generatorFunction) {
/**/
-var pna = __webpack_require__(822);
+var pna = __webpack_require__(78);
/**/
module.exports = Writable;
@@ -4043,210 +4052,99 @@ Object.defineProperty(exports, "__esModule", { value: true });
"use strict";
-// wrapper around mkdirp for tar's needs.
-
-// TODO: This should probably be a class, not functionally
-// passing around state in a gazillion args.
-
-const mkdirp = __webpack_require__(626)
-const fs = __webpack_require__(747)
-const path = __webpack_require__(622)
-const chownr = __webpack_require__(941)
+module.exports = RunQueue
-class SymlinkError extends Error {
- constructor (symlink, path) {
- super('Cannot extract through symbolic link')
- this.path = path
- this.symlink = symlink
- }
+var validate = __webpack_require__(904)
- get name () {
- return 'SylinkError'
- }
+function RunQueue (opts) {
+ validate('Z|O', [opts])
+ if (!opts) opts = {}
+ this.finished = false
+ this.inflight = 0
+ this.maxConcurrency = opts.maxConcurrency || 1
+ this.queued = 0
+ this.queue = []
+ this.currentPrio = null
+ this.currentQueue = null
+ this.Promise = opts.Promise || global.Promise
+ this.deferred = {}
}
-class CwdError extends Error {
- constructor (path, code) {
- super(code + ': Cannot cd into \'' + path + '\'')
- this.path = path
- this.code = code
- }
+RunQueue.prototype = {}
- get name () {
- return 'CwdError'
+RunQueue.prototype.run = function () {
+ if (arguments.length !== 0) throw new Error('RunQueue.run takes no arguments')
+ var self = this
+ var deferred = this.deferred
+ if (!deferred.promise) {
+ deferred.promise = new this.Promise(function (resolve, reject) {
+ deferred.resolve = resolve
+ deferred.reject = reject
+ self._runQueue()
+ })
}
+ return deferred.promise
}
-const mkdir = module.exports = (dir, opt, cb) => {
- // if there's any overlap between mask and mode,
- // then we'll need an explicit chmod
- const umask = opt.umask
- const mode = opt.mode | 0o0700
- const needChmod = (mode & umask) !== 0
-
- const uid = opt.uid
- const gid = opt.gid
- const doChown = typeof uid === 'number' &&
- typeof gid === 'number' &&
- ( uid !== opt.processUid || gid !== opt.processGid )
-
- const preserve = opt.preserve
- const unlink = opt.unlink
- const cache = opt.cache
- const cwd = opt.cwd
+RunQueue.prototype._runQueue = function () {
+ var self = this
- const done = (er, created) => {
- if (er)
- cb(er)
- else {
- cache.set(dir, true)
- if (created && doChown)
- chownr(created, uid, gid, er => done(er))
- else if (needChmod)
- fs.chmod(dir, mode, cb)
- else
- cb()
+ while ((this.inflight < this.maxConcurrency) && this.queued) {
+ if (!this.currentQueue || this.currentQueue.length === 0) {
+ // wait till the current priority is entirely processed before
+ // starting a new one
+ if (this.inflight) return
+ var prios = Object.keys(this.queue)
+ for (var ii = 0; ii < prios.length; ++ii) {
+ var prioQueue = this.queue[prios[ii]]
+ if (prioQueue.length) {
+ this.currentQueue = prioQueue
+ this.currentPrio = prios[ii]
+ break
+ }
+ }
}
- }
- if (cache && cache.get(dir) === true)
- return done()
+ --this.queued
+ ++this.inflight
+ var next = this.currentQueue.shift()
+ var args = next.args || []
- if (dir === cwd)
- return fs.stat(dir, (er, st) => {
- if (er || !st.isDirectory())
- er = new CwdError(dir, er && er.code || 'ENOTDIR')
- done(er)
+ // we explicitly construct a promise here so that queue items can throw
+ // or immediately return to resolve
+ var queueEntry = new this.Promise(function (resolve) {
+ resolve(next.cmd.apply(null, args))
})
- if (preserve)
- return mkdirp(dir, mode, done)
-
- const sub = path.relative(cwd, dir)
- const parts = sub.split(/\/|\\/)
- mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
-}
-
-const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
- if (!parts.length)
- return cb(null, created)
- const p = parts.shift()
- const part = base + '/' + p
- if (cache.get(part))
- return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
- fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
-}
-
-const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
- if (er) {
- if (er.path && path.dirname(er.path) === cwd &&
- (er.code === 'ENOTDIR' || er.code === 'ENOENT'))
- return cb(new CwdError(cwd, er.code))
-
- fs.lstat(part, (statEr, st) => {
- if (statEr)
- cb(statEr)
- else if (st.isDirectory())
- mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
- else if (unlink)
- fs.unlink(part, er => {
- if (er)
- return cb(er)
- fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
- })
- else if (st.isSymbolicLink())
- return cb(new SymlinkError(part, part + '/' + parts.join('/')))
- else
- cb(er)
+ queueEntry.then(function () {
+ --self.inflight
+ if (self.finished) return
+ if (self.queued <= 0 && self.inflight <= 0) {
+ self.finished = true
+ self.deferred.resolve()
+ }
+ self._runQueue()
+ }, function (err) {
+ self.finished = true
+ self.deferred.reject(err)
})
- } else {
- created = created || part
- mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
}
}
-const mkdirSync = module.exports.sync = (dir, opt) => {
- // if there's any overlap between mask and mode,
- // then we'll need an explicit chmod
- const umask = opt.umask
- const mode = opt.mode | 0o0700
- const needChmod = (mode & umask) !== 0
-
- const uid = opt.uid
- const gid = opt.gid
- const doChown = typeof uid === 'number' &&
- typeof gid === 'number' &&
- ( uid !== opt.processUid || gid !== opt.processGid )
-
- const preserve = opt.preserve
- const unlink = opt.unlink
- const cache = opt.cache
- const cwd = opt.cwd
-
- const done = (created) => {
- cache.set(dir, true)
- if (created && doChown)
- chownr.sync(created, uid, gid)
- if (needChmod)
- fs.chmodSync(dir, mode)
- }
-
- if (cache && cache.get(dir) === true)
- return done()
-
- if (dir === cwd) {
- let ok = false
- let code = 'ENOTDIR'
- try {
- ok = fs.statSync(dir).isDirectory()
- } catch (er) {
- code = er.code
- } finally {
- if (!ok)
- throw new CwdError(dir, code)
- }
- done()
- return
- }
-
- if (preserve)
- return done(mkdirp.sync(dir, mode))
-
- const sub = path.relative(cwd, dir)
- const parts = sub.split(/\/|\\/)
- let created = null
- for (let p = parts.shift(), part = cwd;
- p && (part += '/' + p);
- p = parts.shift()) {
-
- if (cache.get(part))
- continue
-
- try {
- fs.mkdirSync(part, mode)
- created = created || part
- cache.set(part, true)
- } catch (er) {
- if (er.path && path.dirname(er.path) === cwd &&
- (er.code === 'ENOTDIR' || er.code === 'ENOENT'))
- return new CwdError(cwd, er.code)
-
- const st = fs.lstatSync(part)
- if (st.isDirectory()) {
- cache.set(part, true)
- continue
- } else if (unlink) {
- fs.unlinkSync(part)
- fs.mkdirSync(part, mode)
- created = created || part
- cache.set(part, true)
- continue
- } else if (st.isSymbolicLink())
- return new SymlinkError(part, part + '/' + parts.join('/'))
- }
+RunQueue.prototype.add = function (prio, cmd, args) {
+ if (this.finished) throw new Error("Can't add to a finished queue. Create a new queue.")
+ if (Math.abs(Math.floor(prio)) !== prio) throw new Error('Priorities must be a positive integer value.')
+ validate('NFA|NFZ', [prio, cmd, args])
+ prio = Number(prio)
+ if (!this.queue[prio]) this.queue[prio] = []
+ ++this.queued
+ this.queue[prio].push({cmd: cmd, args: args})
+ // if this priority is higher than the one we're currently processing,
+ // switch back to processing its queue.
+ if (this.currentPrio > prio) {
+ this.currentQueue = this.queue[prio]
+ this.currentPrio = prio
}
-
- return done(created)
}
@@ -4325,7 +4223,7 @@ module.exports = lockVerify
const fs = __webpack_require__(747)
const path = __webpack_require__(622)
const npa = __webpack_require__(482)
-const semver = __webpack_require__(280)
+const semver = __webpack_require__(957)
function lockVerify(check) {
if (!check) check = '.'
@@ -4405,7 +4303,12 @@ function readJson (file) {
/***/ }),
/* 46 */,
/* 47 */,
-/* 48 */,
+/* 48 */
+/***/ (function(module) {
+
+module.exports = ["0BSD","AAL","ADSL","AFL-1.1","AFL-1.2","AFL-2.0","AFL-2.1","AFL-3.0","AGPL-1.0-only","AGPL-1.0-or-later","AGPL-3.0-only","AGPL-3.0-or-later","AMDPLPA","AML","AMPAS","ANTLR-PD","APAFML","APL-1.0","APSL-1.0","APSL-1.1","APSL-1.2","APSL-2.0","Abstyles","Adobe-2006","Adobe-Glyph","Afmparse","Aladdin","Apache-1.0","Apache-1.1","Apache-2.0","Artistic-1.0","Artistic-1.0-Perl","Artistic-1.0-cl8","Artistic-2.0","BSD-1-Clause","BSD-2-Clause","BSD-2-Clause-Patent","BSD-2-Clause-Views","BSD-3-Clause","BSD-3-Clause-Attribution","BSD-3-Clause-Clear","BSD-3-Clause-LBNL","BSD-3-Clause-No-Nuclear-License","BSD-3-Clause-No-Nuclear-License-2014","BSD-3-Clause-No-Nuclear-Warranty","BSD-3-Clause-Open-MPI","BSD-4-Clause","BSD-4-Clause-UC","BSD-Protection","BSD-Source-Code","BSL-1.0","Bahyph","Barr","Beerware","BitTorrent-1.0","BitTorrent-1.1","BlueOak-1.0.0","Borceux","CAL-1.0","CAL-1.0-Combined-Work-Exception","CATOSL-1.1","CC-BY-1.0","CC-BY-2.0","CC-BY-2.5","CC-BY-3.0","CC-BY-3.0-AT","CC-BY-4.0","CC-BY-NC-1.0","CC-BY-NC-2.0","CC-BY-NC-2.5","CC-BY-NC-3.0","CC-BY-NC-4.0","CC-BY-NC-ND-1.0","CC-BY-NC-ND-2.0","CC-BY-NC-ND-2.5","CC-BY-NC-ND-3.0","CC-BY-NC-ND-3.0-IGO","CC-BY-NC-ND-4.0","CC-BY-NC-SA-1.0","CC-BY-NC-SA-2.0","CC-BY-NC-SA-2.5","CC-BY-NC-SA-3.0","CC-BY-NC-SA-4.0","CC-BY-ND-1.0","CC-BY-ND-2.0","CC-BY-ND-2.5","CC-BY-ND-3.0","CC-BY-ND-4.0","CC-BY-SA-1.0","CC-BY-SA-2.0","CC-BY-SA-2.5","CC-BY-SA-3.0","CC-BY-SA-3.0-AT","CC-BY-SA-4.0","CC-PDDC","CC0-1.0","CDDL-1.0","CDDL-1.1","CDLA-Permissive-1.0","CDLA-Sharing-1.0","CECILL-1.0","CECILL-1.1","CECILL-2.0","CECILL-2.1","CECILL-B","CECILL-C","CERN-OHL-1.1","CERN-OHL-1.2","CERN-OHL-P-2.0","CERN-OHL-S-2.0","CERN-OHL-W-2.0","CNRI-Jython","CNRI-Python","CNRI-Python-GPL-Compatible","CPAL-1.0","CPL-1.0","CPOL-1.02","CUA-OPL-1.0","Caldera","ClArtistic","Condor-1.1","Crossword","CrystalStacker","Cube","D-FSL-1.0","DOC","DSDP","Dotseqn","ECL-1.0","ECL-2.0","EFL-1.0","EFL-2.0","EPICS","EPL-1.0","EPL-2.0","EUDatagrid","EUPL-1.0","EU
PL-1.1","EUPL-1.2","Entessa","ErlPL-1.1","Eurosym","FSFAP","FSFUL","FSFULLR","FTL","Fair","Frameworx-1.0","FreeImage","GFDL-1.1-invariants-only","GFDL-1.1-invariants-or-later","GFDL-1.1-no-invariants-only","GFDL-1.1-no-invariants-or-later","GFDL-1.1-only","GFDL-1.1-or-later","GFDL-1.2-invariants-only","GFDL-1.2-invariants-or-later","GFDL-1.2-no-invariants-only","GFDL-1.2-no-invariants-or-later","GFDL-1.2-only","GFDL-1.2-or-later","GFDL-1.3-invariants-only","GFDL-1.3-invariants-or-later","GFDL-1.3-no-invariants-only","GFDL-1.3-no-invariants-or-later","GFDL-1.3-only","GFDL-1.3-or-later","GL2PS","GLWTPL","GPL-1.0-only","GPL-1.0-or-later","GPL-2.0-only","GPL-2.0-or-later","GPL-3.0-only","GPL-3.0-or-later","Giftware","Glide","Glulxe","HPND","HPND-sell-variant","HaskellReport","Hippocratic-2.1","IBM-pibs","ICU","IJG","IPA","IPL-1.0","ISC","ImageMagick","Imlib2","Info-ZIP","Intel","Intel-ACPI","Interbase-1.0","JPNIC","JSON","JasPer-2.0","LAL-1.2","LAL-1.3","LGPL-2.0-only","LGPL-2.0-or-later","LGPL-2.1-only","LGPL-2.1-or-later","LGPL-3.0-only","LGPL-3.0-or-later","LGPLLR","LPL-1.0","LPL-1.02","LPPL-1.0","LPPL-1.1","LPPL-1.2","LPPL-1.3a","LPPL-1.3c","Latex2e","Leptonica","LiLiQ-P-1.1","LiLiQ-R-1.1","LiLiQ-Rplus-1.1","Libpng","Linux-OpenIB","MIT","MIT-0","MIT-CMU","MIT-advertising","MIT-enna","MIT-feh","MITNFA","MPL-1.0","MPL-1.1","MPL-2.0","MPL-2.0-no-copyleft-exception","MS-PL","MS-RL","MTLL","MakeIndex","MirOS","Motosoto","MulanPSL-1.0","MulanPSL-2.0","Multics","Mup","NASA-1.3","NBPL-1.0","NCGL-UK-2.0","NCSA","NGPL","NIST-PD","NIST-PD-fallback","NLOD-1.0","NLPL","NOSL","NPL-1.0","NPL-1.1","NPOSL-3.0","NRL","NTP","NTP-0","Naumen","Net-SNMP","NetCDF","Newsletr","Nokia","Noweb","O-UDA-1.0","OCCT-PL","OCLC-2.0","ODC-By-1.0","ODbL-1.0","OFL-1.0","OFL-1.0-RFN","OFL-1.0-no-RFN","OFL-1.1","OFL-1.1-RFN","OFL-1.1-no-RFN","OGC-1.0","OGL-Canada-2.0","OGL-UK-1.0","OGL-UK-2.0","OGL-UK-3.0","OGTSL","OLDAP-1.1","OLDAP-1.2","OLDAP-1.3","OLDAP-1.4","OLDAP-2.0","OLDAP-2.0.1","OLDAP-2.1","OLD
AP-2.2","OLDAP-2.2.1","OLDAP-2.2.2","OLDAP-2.3","OLDAP-2.4","OLDAP-2.5","OLDAP-2.6","OLDAP-2.7","OLDAP-2.8","OML","OPL-1.0","OSET-PL-2.1","OSL-1.0","OSL-1.1","OSL-2.0","OSL-2.1","OSL-3.0","OpenSSL","PDDL-1.0","PHP-3.0","PHP-3.01","PSF-2.0","Parity-6.0.0","Parity-7.0.0","Plexus","PolyForm-Noncommercial-1.0.0","PolyForm-Small-Business-1.0.0","PostgreSQL","Python-2.0","QPL-1.0","Qhull","RHeCos-1.1","RPL-1.1","RPL-1.5","RPSL-1.0","RSA-MD","RSCPL","Rdisc","Ruby","SAX-PD","SCEA","SGI-B-1.0","SGI-B-1.1","SGI-B-2.0","SHL-0.5","SHL-0.51","SISSL","SISSL-1.2","SMLNJ","SMPPL","SNIA","SPL-1.0","SSH-OpenSSH","SSH-short","SSPL-1.0","SWL","Saxpath","Sendmail","Sendmail-8.23","SimPL-2.0","Sleepycat","Spencer-86","Spencer-94","Spencer-99","SugarCRM-1.1.3","TAPR-OHL-1.0","TCL","TCP-wrappers","TMate","TORQUE-1.1","TOSL","TU-Berlin-1.0","TU-Berlin-2.0","UCL-1.0","UPL-1.0","Unicode-DFS-2015","Unicode-DFS-2016","Unicode-TOU","Unlicense","VOSTROM","VSL-1.0","Vim","W3C","W3C-19980720","W3C-20150513","WTFPL","Watcom-1.0","Wsuipa","X11","XFree86-1.1","XSkat","Xerox","Xnet","YPL-1.0","YPL-1.1","ZPL-1.1","ZPL-2.0","ZPL-2.1","Zed","Zend-2.0","Zimbra-1.3","Zimbra-1.4","Zlib","blessing","bzip2-1.0.5","bzip2-1.0.6","copyleft-next-0.3.0","copyleft-next-0.3.1","curl","diffmark","dvipdfm","eGenix","etalab-2.0","gSOAP-1.3b","gnuplot","iMatix","libpng-2.0","libselinux-1.0","libtiff","mpich2","psfrag","psutils","xinetd","xpp","zlib-acknowledgement"];
+
+/***/ }),
/* 49 */
/***/ (function(module, __unusedexports, __webpack_require__) {
@@ -4472,8 +4375,8 @@ module.exports = {
parseArg: __webpack_require__(508),
readJSON: __webpack_require__(218),
logicalTree: __webpack_require__(4),
- getPrefix: __webpack_require__(367),
- verifyLock: __webpack_require__(873),
+ getPrefix: __webpack_require__(995),
+ verifyLock: __webpack_require__(564),
stringifyPackage: __webpack_require__(572),
manifest: __webpack_require__(638),
tarball: __webpack_require__(915),
@@ -4483,12 +4386,12 @@ module.exports = {
access: __webpack_require__(704),
search: __webpack_require__(709),
team: __webpack_require__(449),
- org: __webpack_require__(714),
+ org: __webpack_require__(823),
fetch: __webpack_require__(270),
login: __webpack_require__(838),
adduser: __webpack_require__(495),
profile: __webpack_require__(244),
- publish: __webpack_require__(760),
+ publish: __webpack_require__(641),
unpublish: __webpack_require__(671),
runScript: __webpack_require__(85),
log: __webpack_require__(230),
@@ -4551,9 +4454,9 @@ exports.NoopContextManager = NoopContextManager;
"use strict";
-const figgyPudding = __webpack_require__(122)
+const figgyPudding = __webpack_require__(965)
const npa = __webpack_require__(482)
-const semver = __webpack_require__(280)
+const semver = __webpack_require__(798)
const PickerOpts = figgyPudding({
defaultTag: { default: 'latest' },
@@ -4689,12359 +4592,8963 @@ function pickManifest (packument, wanted, opts) {
/***/ }),
/* 56 */,
-/* 57 */,
-/* 58 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+/* 57 */
+/***/ (function(module, exports) {
-module.exports = __webpack_require__(600);
+exports = module.exports = SemVer
-/***/ }),
-/* 59 */,
-/* 60 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+var debug
+/* istanbul ignore next */
+if (typeof process === 'object' &&
+ process.env &&
+ process.env.NODE_DEBUG &&
+ /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
+ debug = function () {
+ var args = Array.prototype.slice.call(arguments, 0)
+ args.unshift('SEMVER')
+ console.log.apply(console, args)
+ }
+} else {
+ debug = function () {}
+}
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736. When node v6
-// support drops, we can just export the realZlibConstants object.
-const realZlibConstants = __webpack_require__(761).constants ||
- /* istanbul ignore next */ { ZLIB_VERNUM: 4736 }
+// Note: this is the semver.org version of the spec that it implements
+// Not necessarily the package version of this code.
+exports.SEMVER_SPEC_VERSION = '2.0.0'
-module.exports = Object.freeze(Object.assign(Object.create(null), {
- Z_NO_FLUSH: 0,
- Z_PARTIAL_FLUSH: 1,
- Z_SYNC_FLUSH: 2,
- Z_FULL_FLUSH: 3,
- Z_FINISH: 4,
- Z_BLOCK: 5,
- Z_OK: 0,
- Z_STREAM_END: 1,
- Z_NEED_DICT: 2,
- Z_ERRNO: -1,
- Z_STREAM_ERROR: -2,
- Z_DATA_ERROR: -3,
- Z_MEM_ERROR: -4,
- Z_BUF_ERROR: -5,
- Z_VERSION_ERROR: -6,
- Z_NO_COMPRESSION: 0,
- Z_BEST_SPEED: 1,
- Z_BEST_COMPRESSION: 9,
- Z_DEFAULT_COMPRESSION: -1,
- Z_FILTERED: 1,
- Z_HUFFMAN_ONLY: 2,
- Z_RLE: 3,
- Z_FIXED: 4,
- Z_DEFAULT_STRATEGY: 0,
- DEFLATE: 1,
- INFLATE: 2,
- GZIP: 3,
- GUNZIP: 4,
- DEFLATERAW: 5,
- INFLATERAW: 6,
- UNZIP: 7,
- BROTLI_DECODE: 8,
- BROTLI_ENCODE: 9,
- Z_MIN_WINDOWBITS: 8,
- Z_MAX_WINDOWBITS: 15,
- Z_DEFAULT_WINDOWBITS: 15,
- Z_MIN_CHUNK: 64,
- Z_MAX_CHUNK: Infinity,
- Z_DEFAULT_CHUNK: 16384,
- Z_MIN_MEMLEVEL: 1,
- Z_MAX_MEMLEVEL: 9,
- Z_DEFAULT_MEMLEVEL: 8,
- Z_MIN_LEVEL: -1,
- Z_MAX_LEVEL: 9,
- Z_DEFAULT_LEVEL: -1,
- BROTLI_OPERATION_PROCESS: 0,
- BROTLI_OPERATION_FLUSH: 1,
- BROTLI_OPERATION_FINISH: 2,
- BROTLI_OPERATION_EMIT_METADATA: 3,
- BROTLI_MODE_GENERIC: 0,
- BROTLI_MODE_TEXT: 1,
- BROTLI_MODE_FONT: 2,
- BROTLI_DEFAULT_MODE: 0,
- BROTLI_MIN_QUALITY: 0,
- BROTLI_MAX_QUALITY: 11,
- BROTLI_DEFAULT_QUALITY: 11,
- BROTLI_MIN_WINDOW_BITS: 10,
- BROTLI_MAX_WINDOW_BITS: 24,
- BROTLI_LARGE_MAX_WINDOW_BITS: 30,
- BROTLI_DEFAULT_WINDOW: 22,
- BROTLI_MIN_INPUT_BLOCK_BITS: 16,
- BROTLI_MAX_INPUT_BLOCK_BITS: 24,
- BROTLI_PARAM_MODE: 0,
- BROTLI_PARAM_QUALITY: 1,
- BROTLI_PARAM_LGWIN: 2,
- BROTLI_PARAM_LGBLOCK: 3,
- BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
- BROTLI_PARAM_SIZE_HINT: 5,
- BROTLI_PARAM_LARGE_WINDOW: 6,
- BROTLI_PARAM_NPOSTFIX: 7,
- BROTLI_PARAM_NDIRECT: 8,
- BROTLI_DECODER_RESULT_ERROR: 0,
- BROTLI_DECODER_RESULT_SUCCESS: 1,
- BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
- BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
- BROTLI_DECODER_NO_ERROR: 0,
- BROTLI_DECODER_SUCCESS: 1,
- BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
- BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
- BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
- BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
- BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
- BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
- BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
- BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
- BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
- BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
- BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
- BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
- BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
- BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
- BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
- BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
- BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
- BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
- BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
- BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
- BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants))
+var MAX_LENGTH = 256
+var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
+ /* istanbul ignore next */ 9007199254740991
+// Max safe segment length for coercion.
+var MAX_SAFE_COMPONENT_LENGTH = 16
-/***/ }),
-/* 61 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+// The actual regexps go on exports.re
+var re = exports.re = []
+var src = exports.src = []
+var R = 0
-"use strict";
+// The following Regular Expressions can be used for tokenizing,
+// validating, and parsing SemVer version strings.
-var numberIsNan = __webpack_require__(530);
+// ## Numeric Identifier
+// A single `0`, or a non-zero digit followed by zero or more digits.
-module.exports = function (x) {
- if (numberIsNan(x)) {
- return false;
- }
+var NUMERICIDENTIFIER = R++
+src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'
+var NUMERICIDENTIFIERLOOSE = R++
+src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'
- // https://github.com/nodejs/io.js/blob/cff7300a578be1b10001f2d967aaedc88aee6402/lib/readline.js#L1369
+// ## Non-numeric Identifier
+// Zero or more digits, followed by a letter or hyphen, and then zero or
+// more letters, digits, or hyphens.
- // code points are derived from:
- // http://www.unix.org/Public/UNIDATA/EastAsianWidth.txt
- if (x >= 0x1100 && (
- x <= 0x115f || // Hangul Jamo
- 0x2329 === x || // LEFT-POINTING ANGLE BRACKET
- 0x232a === x || // RIGHT-POINTING ANGLE BRACKET
- // CJK Radicals Supplement .. Enclosed CJK Letters and Months
- (0x2e80 <= x && x <= 0x3247 && x !== 0x303f) ||
- // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A
- 0x3250 <= x && x <= 0x4dbf ||
- // CJK Unified Ideographs .. Yi Radicals
- 0x4e00 <= x && x <= 0xa4c6 ||
- // Hangul Jamo Extended-A
- 0xa960 <= x && x <= 0xa97c ||
- // Hangul Syllables
- 0xac00 <= x && x <= 0xd7a3 ||
- // CJK Compatibility Ideographs
- 0xf900 <= x && x <= 0xfaff ||
- // Vertical Forms
- 0xfe10 <= x && x <= 0xfe19 ||
- // CJK Compatibility Forms .. Small Form Variants
- 0xfe30 <= x && x <= 0xfe6b ||
- // Halfwidth and Fullwidth Forms
- 0xff01 <= x && x <= 0xff60 ||
- 0xffe0 <= x && x <= 0xffe6 ||
- // Kana Supplement
- 0x1b000 <= x && x <= 0x1b001 ||
- // Enclosed Ideographic Supplement
- 0x1f200 <= x && x <= 0x1f251 ||
- // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane
- 0x20000 <= x && x <= 0x3fffd)) {
- return true;
- }
+var NONNUMERICIDENTIFIER = R++
+src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
- return false;
-}
+// ## Main Version
+// Three dot-separated numeric identifiers.
+var MAINVERSION = R++
+src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIER] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIER] + ')'
-/***/ }),
-/* 62 */
-/***/ (function(__unusedmodule, exports) {
+var MAINVERSIONLOOSE = R++
+src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
+ '(' + src[NUMERICIDENTIFIERLOOSE] + ')'
-exports.parse = exports.decode = decode
+// ## Pre-release Version Identifier
+// A numeric identifier, or a non-numeric identifier.
-exports.stringify = exports.encode = encode
+var PRERELEASEIDENTIFIER = R++
+src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
+ '|' + src[NONNUMERICIDENTIFIER] + ')'
-exports.safe = safe
-exports.unsafe = unsafe
+var PRERELEASEIDENTIFIERLOOSE = R++
+src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
+ '|' + src[NONNUMERICIDENTIFIER] + ')'
-var eol = typeof process !== 'undefined' &&
- process.platform === 'win32' ? '\r\n' : '\n'
+// ## Pre-release Version
+// Hyphen, followed by one or more dot-separated pre-release version
+// identifiers.
-function encode (obj, opt) {
- var children = []
- var out = ''
+var PRERELEASE = R++
+src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
+ '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'
- if (typeof opt === 'string') {
- opt = {
- section: opt,
- whitespace: false
- }
- } else {
- opt = opt || {}
- opt.whitespace = opt.whitespace === true
- }
+var PRERELEASELOOSE = R++
+src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
+ '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'
- var separator = opt.whitespace ? ' = ' : '='
+// ## Build Metadata Identifier
+// Any combination of digits, letters, or hyphens.
- Object.keys(obj).forEach(function (k, _, __) {
- var val = obj[k]
- if (val && Array.isArray(val)) {
- val.forEach(function (item) {
- out += safe(k + '[]') + separator + safe(item) + '\n'
- })
- } else if (val && typeof val === 'object') {
- children.push(k)
- } else {
- out += safe(k) + separator + safe(val) + eol
- }
- })
+var BUILDIDENTIFIER = R++
+src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
- if (opt.section && out.length) {
- out = '[' + safe(opt.section) + ']' + eol + out
- }
+// ## Build Metadata
+// Plus sign, followed by one or more period-separated build metadata
+// identifiers.
- children.forEach(function (k, _, __) {
- var nk = dotSplit(k).join('\\.')
- var section = (opt.section ? opt.section + '.' : '') + nk
- var child = encode(obj[k], {
- section: section,
- whitespace: opt.whitespace
- })
- if (out.length && child.length) {
- out += eol
- }
- out += child
- })
+var BUILD = R++
+src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
+ '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'
- return out
-}
+// ## Full Version String
+// A main version, followed optionally by a pre-release version and
+// build metadata.
-function dotSplit (str) {
- return str.replace(/\1/g, '\u0002LITERAL\\1LITERAL\u0002')
- .replace(/\\\./g, '\u0001')
- .split(/\./).map(function (part) {
- return part.replace(/\1/g, '\\.')
- .replace(/\2LITERAL\\1LITERAL\2/g, '\u0001')
- })
-}
+// Note that the only major, minor, patch, and pre-release sections of
+// the version string are capturing groups. The build metadata is not a
+// capturing group, because it should not ever be used in version
+// comparison.
-function decode (str) {
- var out = {}
- var p = out
- var section = null
- // section |key = value
- var re = /^\[([^\]]*)\]$|^([^=]+)(=(.*))?$/i
- var lines = str.split(/[\r\n]+/g)
+var FULL = R++
+var FULLPLAIN = 'v?' + src[MAINVERSION] +
+ src[PRERELEASE] + '?' +
+ src[BUILD] + '?'
- lines.forEach(function (line, _, __) {
- if (!line || line.match(/^\s*[;#]/)) return
- var match = line.match(re)
- if (!match) return
- if (match[1] !== undefined) {
- section = unsafe(match[1])
- p = out[section] = out[section] || {}
- return
- }
- var key = unsafe(match[2])
- var value = match[3] ? unsafe(match[4]) : true
- switch (value) {
- case 'true':
- case 'false':
- case 'null': value = JSON.parse(value)
- }
+src[FULL] = '^' + FULLPLAIN + '$'
- // Convert keys with '[]' suffix to an array
- if (key.length > 2 && key.slice(-2) === '[]') {
- key = key.substring(0, key.length - 2)
- if (!p[key]) {
- p[key] = []
- } else if (!Array.isArray(p[key])) {
- p[key] = [p[key]]
- }
- }
+// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
+// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
+// common in the npm registry.
+var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
+ src[PRERELEASELOOSE] + '?' +
+ src[BUILD] + '?'
- // safeguard against resetting a previously defined
- // array by accidentally forgetting the brackets
- if (Array.isArray(p[key])) {
- p[key].push(value)
- } else {
- p[key] = value
- }
- })
+var LOOSE = R++
+src[LOOSE] = '^' + LOOSEPLAIN + '$'
- // {a:{y:1},"a.b":{x:2}} --> {a:{y:1,b:{x:2}}}
- // use a filter to return the keys that have to be deleted.
- Object.keys(out).filter(function (k, _, __) {
- if (!out[k] ||
- typeof out[k] !== 'object' ||
- Array.isArray(out[k])) {
- return false
- }
- // see if the parent section is also an object.
- // if so, add it to that, and mark this one for deletion
- var parts = dotSplit(k)
- var p = out
- var l = parts.pop()
- var nl = l.replace(/\\\./g, '.')
- parts.forEach(function (part, _, __) {
- if (!p[part] || typeof p[part] !== 'object') p[part] = {}
- p = p[part]
- })
- if (p === out && nl === l) {
- return false
- }
- p[nl] = out[k]
- return true
- }).forEach(function (del, _, __) {
- delete out[del]
- })
+var GTLT = R++
+src[GTLT] = '((?:<|>)?=?)'
- return out
-}
+// Something like "2.*" or "1.2.x".
+// Note that "x.x" is a valid xRange identifer, meaning "any version"
+// Only the first item is strictly required.
+var XRANGEIDENTIFIERLOOSE = R++
+src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
+var XRANGEIDENTIFIER = R++
+src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'
-function isQuoted (val) {
- return (val.charAt(0) === '"' && val.slice(-1) === '"') ||
- (val.charAt(0) === "'" && val.slice(-1) === "'")
-}
+var XRANGEPLAIN = R++
+src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
+ '(?:' + src[PRERELEASE] + ')?' +
+ src[BUILD] + '?' +
+ ')?)?'
-function safe (val) {
- return (typeof val !== 'string' ||
- val.match(/[=\r\n]/) ||
- val.match(/^\[/) ||
- (val.length > 1 &&
- isQuoted(val)) ||
- val !== val.trim())
- ? JSON.stringify(val)
- : val.replace(/;/g, '\\;').replace(/#/g, '\\#')
-}
+var XRANGEPLAINLOOSE = R++
+src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
+ '(?:' + src[PRERELEASELOOSE] + ')?' +
+ src[BUILD] + '?' +
+ ')?)?'
-function unsafe (val, doUnesc) {
- val = (val || '').trim()
- if (isQuoted(val)) {
- // remove the single quotes before calling JSON.parse
- if (val.charAt(0) === "'") {
- val = val.substr(1, val.length - 2)
- }
- try { val = JSON.parse(val) } catch (_) {}
- } else {
- // walk the val to find the first not-escaped ; character
- var esc = false
- var unesc = ''
- for (var i = 0, l = val.length; i < l; i++) {
- var c = val.charAt(i)
- if (esc) {
- if ('\\;#'.indexOf(c) !== -1) {
- unesc += c
- } else {
- unesc += '\\' + c
- }
- esc = false
- } else if (';#'.indexOf(c) !== -1) {
- break
- } else if (c === '\\') {
- esc = true
- } else {
- unesc += c
- }
- }
- if (esc) {
- unesc += '\\'
- }
- return unesc.trim()
- }
- return val
-}
+var XRANGE = R++
+src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'
+var XRANGELOOSE = R++
+src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'
+// Coercion.
+// Extract anything that could conceivably be a part of a valid semver
+var COERCE = R++
+src[COERCE] = '(?:^|[^\\d])' +
+ '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
+ '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
+ '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
+ '(?:$|[^\\d])'
-/***/ }),
-/* 63 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+// Tilde ranges.
+// Meaning is "reasonably at or greater than"
+var LONETILDE = R++
+src[LONETILDE] = '(?:~>?)'
-"use strict";
+var TILDETRIM = R++
+src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'
+re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g')
+var tildeTrimReplace = '$1~'
+var TILDE = R++
+src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'
+var TILDELOOSE = R++
+src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'
-const assert = __webpack_require__(357)
-const EE = __webpack_require__(614).EventEmitter
-const Parser = __webpack_require__(203)
-const fs = __webpack_require__(747)
-const fsm = __webpack_require__(827)
-const path = __webpack_require__(622)
-const mkdir = __webpack_require__(34)
-const mkdirSync = mkdir.sync
-const wc = __webpack_require__(478)
+// Caret ranges.
+// Meaning is "at least and backwards compatible with"
+var LONECARET = R++
+src[LONECARET] = '(?:\\^)'
-const ONENTRY = Symbol('onEntry')
-const CHECKFS = Symbol('checkFs')
-const ISREUSABLE = Symbol('isReusable')
-const MAKEFS = Symbol('makeFs')
-const FILE = Symbol('file')
-const DIRECTORY = Symbol('directory')
-const LINK = Symbol('link')
-const SYMLINK = Symbol('symlink')
-const HARDLINK = Symbol('hardlink')
-const UNSUPPORTED = Symbol('unsupported')
-const UNKNOWN = Symbol('unknown')
-const CHECKPATH = Symbol('checkPath')
-const MKDIR = Symbol('mkdir')
-const ONERROR = Symbol('onError')
-const PENDING = Symbol('pending')
-const PEND = Symbol('pend')
-const UNPEND = Symbol('unpend')
-const ENDED = Symbol('ended')
-const MAYBECLOSE = Symbol('maybeClose')
-const SKIP = Symbol('skip')
-const DOCHOWN = Symbol('doChown')
-const UID = Symbol('uid')
-const GID = Symbol('gid')
-const crypto = __webpack_require__(417)
-
-// Unlinks on Windows are not atomic.
-//
-// This means that if you have a file entry, followed by another
-// file entry with an identical name, and you cannot re-use the file
-// (because it's a hardlink, or because unlink:true is set, or it's
-// Windows, which does not have useful nlink values), then the unlink
-// will be committed to the disk AFTER the new file has been written
-// over the old one, deleting the new file.
-//
-// To work around this, on Windows systems, we rename the file and then
-// delete the renamed file. It's a sloppy kludge, but frankly, I do not
-// know of a better way to do this, given windows' non-atomic unlink
-// semantics.
-//
-// See: https://github.com/npm/node-tar/issues/183
-/* istanbul ignore next */
-const unlinkFile = (path, cb) => {
- if (process.platform !== 'win32')
- return fs.unlink(path, cb)
-
- const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
- fs.rename(path, name, er => {
- if (er)
- return cb(er)
- fs.unlink(name, cb)
- })
-}
-
-/* istanbul ignore next */
-const unlinkFileSync = path => {
- if (process.platform !== 'win32')
- return fs.unlinkSync(path)
-
- const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
- fs.renameSync(path, name)
- fs.unlinkSync(name)
-}
-
-// this.gid, entry.gid, this.processUid
-const uint32 = (a, b, c) =>
- a === a >>> 0 ? a
- : b === b >>> 0 ? b
- : c
-
-class Unpack extends Parser {
- constructor (opt) {
- if (!opt)
- opt = {}
-
- opt.ondone = _ => {
- this[ENDED] = true
- this[MAYBECLOSE]()
- }
-
- super(opt)
-
- this.transform = typeof opt.transform === 'function' ? opt.transform : null
-
- this.writable = true
- this.readable = false
-
- this[PENDING] = 0
- this[ENDED] = false
-
- this.dirCache = opt.dirCache || new Map()
-
- if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
- // need both or neither
- if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number')
- throw new TypeError('cannot set owner without number uid and gid')
- if (opt.preserveOwner)
- throw new TypeError(
- 'cannot preserve owner in archive and also set owner explicitly')
- this.uid = opt.uid
- this.gid = opt.gid
- this.setOwner = true
- } else {
- this.uid = null
- this.gid = null
- this.setOwner = false
- }
-
- // default true for root
- if (opt.preserveOwner === undefined && typeof opt.uid !== 'number')
- this.preserveOwner = process.getuid && process.getuid() === 0
- else
- this.preserveOwner = !!opt.preserveOwner
-
- this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
- process.getuid() : null
- this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
- process.getgid() : null
+var CARETTRIM = R++
+src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'
+re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g')
+var caretTrimReplace = '$1^'
- // mostly just for testing, but useful in some cases.
- // Forcibly trigger a chown on every entry, no matter what
- this.forceChown = opt.forceChown === true
+var CARET = R++
+src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'
+var CARETLOOSE = R++
+src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'
- // turn >| in filenames into 0xf000-higher encoded forms
- this.win32 = !!opt.win32 || process.platform === 'win32'
+// A simple gt/lt/eq thing, or just "" to indicate "any version"
+var COMPARATORLOOSE = R++
+src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'
+var COMPARATOR = R++
+src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'
- // do not unpack over files that are newer than what's in the archive
- this.newer = !!opt.newer
+// An expression to strip any whitespace between the gtlt and the thing
+// it modifies, so that `> 1.2.3` ==> `>1.2.3`
+var COMPARATORTRIM = R++
+src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
+ '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'
- // do not unpack over ANY files
- this.keep = !!opt.keep
+// this one has to use the /g flag
+re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g')
+var comparatorTrimReplace = '$1$2$3'
- // do not set mtime/atime of extracted entries
- this.noMtime = !!opt.noMtime
+// Something like `1.2.3 - 1.2.4`
+// Note that these all use the loose form, because they'll be
+// checked against either the strict or loose comparator form
+// later.
+var HYPHENRANGE = R++
+src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
+ '\\s+-\\s+' +
+ '(' + src[XRANGEPLAIN] + ')' +
+ '\\s*$'
- // allow .., absolute path entries, and unpacking through symlinks
- // without this, warn and skip .., relativize absolutes, and error
- // on symlinks in extraction path
- this.preservePaths = !!opt.preservePaths
+var HYPHENRANGELOOSE = R++
+src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
+ '\\s+-\\s+' +
+ '(' + src[XRANGEPLAINLOOSE] + ')' +
+ '\\s*$'
- // unlink files and links before writing. This breaks existing hard
- // links, and removes symlink directories rather than erroring
- this.unlink = !!opt.unlink
+// Star ranges basically just allow anything at all.
+var STAR = R++
+src[STAR] = '(<|>)?=?\\s*\\*'
- this.cwd = path.resolve(opt.cwd || process.cwd())
- this.strip = +opt.strip || 0
- this.processUmask = process.umask()
- this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
- // default mode for dirs created as parents
- this.dmode = opt.dmode || (0o0777 & (~this.umask))
- this.fmode = opt.fmode || (0o0666 & (~this.umask))
- this.on('entry', entry => this[ONENTRY](entry))
+// Compile to actual regexp objects.
+// All are flag-free, unless they were created above with a flag.
+for (var i = 0; i < R; i++) {
+ debug(i, src[i])
+ if (!re[i]) {
+ re[i] = new RegExp(src[i])
}
+}
- [MAYBECLOSE] () {
- if (this[ENDED] && this[PENDING] === 0) {
- this.emit('prefinish')
- this.emit('finish')
- this.emit('end')
- this.emit('close')
+exports.parse = parse
+function parse (version, options) {
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
}
}
- [CHECKPATH] (entry) {
- if (this.strip) {
- const parts = entry.path.split(/\/|\\/)
- if (parts.length < this.strip)
- return false
- entry.path = parts.slice(this.strip).join('/')
-
- if (entry.type === 'Link') {
- const linkparts = entry.linkpath.split(/\/|\\/)
- if (linkparts.length >= this.strip)
- entry.linkpath = linkparts.slice(this.strip).join('/')
- }
- }
-
- if (!this.preservePaths) {
- const p = entry.path
- if (p.match(/(^|\/|\\)\.\.(\\|\/|$)/)) {
- this.warn('path contains \'..\'', p)
- return false
- }
-
- // absolutes on posix are also absolutes on win32
- // so we only need to test this one to get both
- if (path.win32.isAbsolute(p)) {
- const parsed = path.win32.parse(p)
- this.warn('stripping ' + parsed.root + ' from absolute path', p)
- entry.path = p.substr(parsed.root.length)
- }
- }
+ if (version instanceof SemVer) {
+ return version
+ }
- // only encode : chars that aren't drive letter indicators
- if (this.win32) {
- const parsed = path.win32.parse(entry.path)
- entry.path = parsed.root === '' ? wc.encode(entry.path)
- : parsed.root + wc.encode(entry.path.substr(parsed.root.length))
- }
+ if (typeof version !== 'string') {
+ return null
+ }
- if (path.isAbsolute(entry.path))
- entry.absolute = entry.path
- else
- entry.absolute = path.resolve(this.cwd, entry.path)
+ if (version.length > MAX_LENGTH) {
+ return null
+ }
- return true
+ var r = options.loose ? re[LOOSE] : re[FULL]
+ if (!r.test(version)) {
+ return null
}
- [ONENTRY] (entry) {
- if (!this[CHECKPATH](entry))
- return entry.resume()
+ try {
+ return new SemVer(version, options)
+ } catch (er) {
+ return null
+ }
+}
- assert.equal(typeof entry.absolute, 'string')
+exports.valid = valid
+function valid (version, options) {
+ var v = parse(version, options)
+ return v ? v.version : null
+}
- switch (entry.type) {
- case 'Directory':
- case 'GNUDumpDir':
- if (entry.mode)
- entry.mode = entry.mode | 0o700
+exports.clean = clean
+function clean (version, options) {
+ var s = parse(version.trim().replace(/^[=v]+/, ''), options)
+ return s ? s.version : null
+}
- case 'File':
- case 'OldFile':
- case 'ContiguousFile':
- case 'Link':
- case 'SymbolicLink':
- return this[CHECKFS](entry)
+exports.SemVer = SemVer
- case 'CharacterDevice':
- case 'BlockDevice':
- case 'FIFO':
- return this[UNSUPPORTED](entry)
+function SemVer (version, options) {
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
}
}
-
- [ONERROR] (er, entry) {
- // Cwd has to exist, or else nothing works. That's serious.
- // Other errors are warnings, which raise the error in strict
- // mode, but otherwise continue on.
- if (er.name === 'CwdError')
- this.emit('error', er)
- else {
- this.warn(er.message, er)
- this[UNPEND]()
- entry.resume()
+ if (version instanceof SemVer) {
+ if (version.loose === options.loose) {
+ return version
+ } else {
+ version = version.version
}
+ } else if (typeof version !== 'string') {
+ throw new TypeError('Invalid Version: ' + version)
}
- [MKDIR] (dir, mode, cb) {
- mkdir(dir, {
- uid: this.uid,
- gid: this.gid,
- processUid: this.processUid,
- processGid: this.processGid,
- umask: this.processUmask,
- preserve: this.preservePaths,
- unlink: this.unlink,
- cache: this.dirCache,
- cwd: this.cwd,
- mode: mode
- }, cb)
- }
-
- [DOCHOWN] (entry) {
- // in preserve owner mode, chown if the entry doesn't match process
- // in set owner mode, chown if setting doesn't match process
- return this.forceChown ||
- this.preserveOwner &&
- ( typeof entry.uid === 'number' && entry.uid !== this.processUid ||
- typeof entry.gid === 'number' && entry.gid !== this.processGid )
- ||
- ( typeof this.uid === 'number' && this.uid !== this.processUid ||
- typeof this.gid === 'number' && this.gid !== this.processGid )
- }
-
- [UID] (entry) {
- return uint32(this.uid, entry.uid, this.processUid)
+ if (version.length > MAX_LENGTH) {
+ throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
}
- [GID] (entry) {
- return uint32(this.gid, entry.gid, this.processGid)
+ if (!(this instanceof SemVer)) {
+ return new SemVer(version, options)
}
- [FILE] (entry) {
- const mode = entry.mode & 0o7777 || this.fmode
- const stream = new fsm.WriteStream(entry.absolute, {
- mode: mode,
- autoClose: false
- })
- stream.on('error', er => this[ONERROR](er, entry))
-
- let actions = 1
- const done = er => {
- if (er)
- return this[ONERROR](er, entry)
+ debug('SemVer', version, options)
+ this.options = options
+ this.loose = !!options.loose
- if (--actions === 0)
- fs.close(stream.fd, _ => this[UNPEND]())
- }
+ var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL])
- stream.on('finish', _ => {
- // if futimes fails, try utimes
- // if utimes fails, fail with the original error
- // same for fchown/chown
- const abs = entry.absolute
- const fd = stream.fd
+ if (!m) {
+ throw new TypeError('Invalid Version: ' + version)
+ }
- if (entry.mtime && !this.noMtime) {
- actions++
- const atime = entry.atime || new Date()
- const mtime = entry.mtime
- fs.futimes(fd, atime, mtime, er =>
- er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
- : done())
- }
+ this.raw = version
- if (this[DOCHOWN](entry)) {
- actions++
- const uid = this[UID](entry)
- const gid = this[GID](entry)
- fs.fchown(fd, uid, gid, er =>
- er ? fs.chown(abs, uid, gid, er2 => done(er2 && er))
- : done())
- }
+ // these are actually numbers
+ this.major = +m[1]
+ this.minor = +m[2]
+ this.patch = +m[3]
- done()
- })
+ if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
+ throw new TypeError('Invalid major version')
+ }
- const tx = this.transform ? this.transform(entry) || entry : entry
- if (tx !== entry) {
- tx.on('error', er => this[ONERROR](er, entry))
- entry.pipe(tx)
- }
- tx.pipe(stream)
+ if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
+ throw new TypeError('Invalid minor version')
}
- [DIRECTORY] (entry) {
- const mode = entry.mode & 0o7777 || this.dmode
- this[MKDIR](entry.absolute, mode, er => {
- if (er)
- return this[ONERROR](er, entry)
+ if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
+ throw new TypeError('Invalid patch version')
+ }
- let actions = 1
- const done = _ => {
- if (--actions === 0) {
- this[UNPEND]()
- entry.resume()
+ // numberify any prerelease numeric ids
+ if (!m[4]) {
+ this.prerelease = []
+ } else {
+ this.prerelease = m[4].split('.').map(function (id) {
+ if (/^[0-9]+$/.test(id)) {
+ var num = +id
+ if (num >= 0 && num < MAX_SAFE_INTEGER) {
+ return num
}
}
-
- if (entry.mtime && !this.noMtime) {
- actions++
- fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done)
- }
-
- if (this[DOCHOWN](entry)) {
- actions++
- fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done)
- }
-
- done()
+ return id
})
}
- [UNSUPPORTED] (entry) {
- this.warn('unsupported entry type: ' + entry.type, entry)
- entry.resume()
- }
+ this.build = m[5] ? m[5].split('.') : []
+ this.format()
+}
- [SYMLINK] (entry) {
- this[LINK](entry, entry.linkpath, 'symlink')
+SemVer.prototype.format = function () {
+ this.version = this.major + '.' + this.minor + '.' + this.patch
+ if (this.prerelease.length) {
+ this.version += '-' + this.prerelease.join('.')
}
+ return this.version
+}
- [HARDLINK] (entry) {
- this[LINK](entry, path.resolve(this.cwd, entry.linkpath), 'link')
- }
+SemVer.prototype.toString = function () {
+ return this.version
+}
- [PEND] () {
- this[PENDING]++
+SemVer.prototype.compare = function (other) {
+ debug('SemVer.compare', this.version, this.options, other)
+ if (!(other instanceof SemVer)) {
+ other = new SemVer(other, this.options)
}
- [UNPEND] () {
- this[PENDING]--
- this[MAYBECLOSE]()
+ return this.compareMain(other) || this.comparePre(other)
+}
+
+SemVer.prototype.compareMain = function (other) {
+ if (!(other instanceof SemVer)) {
+ other = new SemVer(other, this.options)
}
- [SKIP] (entry) {
- this[UNPEND]()
- entry.resume()
+ return compareIdentifiers(this.major, other.major) ||
+ compareIdentifiers(this.minor, other.minor) ||
+ compareIdentifiers(this.patch, other.patch)
+}
+
+SemVer.prototype.comparePre = function (other) {
+ if (!(other instanceof SemVer)) {
+ other = new SemVer(other, this.options)
}
- // Check if we can reuse an existing filesystem entry safely and
- // overwrite it, rather than unlinking and recreating
- // Windows doesn't report a useful nlink, so we just never reuse entries
- [ISREUSABLE] (entry, st) {
- return entry.type === 'File' &&
- !this.unlink &&
- st.isFile() &&
- st.nlink <= 1 &&
- process.platform !== 'win32'
+ // NOT having a prerelease is > having one
+ if (this.prerelease.length && !other.prerelease.length) {
+ return -1
+ } else if (!this.prerelease.length && other.prerelease.length) {
+ return 1
+ } else if (!this.prerelease.length && !other.prerelease.length) {
+ return 0
}
- // check if a thing is there, and if so, try to clobber it
- [CHECKFS] (entry) {
- this[PEND]()
- this[MKDIR](path.dirname(entry.absolute), this.dmode, er => {
- if (er)
- return this[ONERROR](er, entry)
- fs.lstat(entry.absolute, (er, st) => {
- if (st && (this.keep || this.newer && st.mtime > entry.mtime))
- this[SKIP](entry)
- else if (er || this[ISREUSABLE](entry, st))
- this[MAKEFS](null, entry)
- else if (st.isDirectory()) {
- if (entry.type === 'Directory') {
- if (!entry.mode || (st.mode & 0o7777) === entry.mode)
- this[MAKEFS](null, entry)
- else
- fs.chmod(entry.absolute, entry.mode, er => this[MAKEFS](er, entry))
- } else
- fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry))
- } else
- unlinkFile(entry.absolute, er => this[MAKEFS](er, entry))
- })
- })
- }
-
- [MAKEFS] (er, entry) {
- if (er)
- return this[ONERROR](er, entry)
-
- switch (entry.type) {
- case 'File':
- case 'OldFile':
- case 'ContiguousFile':
- return this[FILE](entry)
-
- case 'Link':
- return this[HARDLINK](entry)
-
- case 'SymbolicLink':
- return this[SYMLINK](entry)
-
- case 'Directory':
- case 'GNUDumpDir':
- return this[DIRECTORY](entry)
+ var i = 0
+ do {
+ var a = this.prerelease[i]
+ var b = other.prerelease[i]
+ debug('prerelease compare', i, a, b)
+ if (a === undefined && b === undefined) {
+ return 0
+ } else if (b === undefined) {
+ return 1
+ } else if (a === undefined) {
+ return -1
+ } else if (a === b) {
+ continue
+ } else {
+ return compareIdentifiers(a, b)
}
- }
-
- [LINK] (entry, linkpath, link) {
- // XXX: get the type ('file' or 'dir') for windows
- fs[link](linkpath, entry.absolute, er => {
- if (er)
- return this[ONERROR](er, entry)
- this[UNPEND]()
- entry.resume()
- })
- }
+ } while (++i)
}
-class UnpackSync extends Unpack {
- constructor (opt) {
- super(opt)
- }
-
- [CHECKFS] (entry) {
- const er = this[MKDIR](path.dirname(entry.absolute), this.dmode)
- if (er)
- return this[ONERROR](er, entry)
- try {
- const st = fs.lstatSync(entry.absolute)
- if (this.keep || this.newer && st.mtime > entry.mtime)
- return this[SKIP](entry)
- else if (this[ISREUSABLE](entry, st))
- return this[MAKEFS](null, entry)
- else {
- try {
- if (st.isDirectory()) {
- if (entry.type === 'Directory') {
- if (entry.mode && (st.mode & 0o7777) !== entry.mode)
- fs.chmodSync(entry.absolute, entry.mode)
- } else
- fs.rmdirSync(entry.absolute)
- } else
- unlinkFileSync(entry.absolute)
- return this[MAKEFS](null, entry)
- } catch (er) {
- return this[ONERROR](er, entry)
- }
+// preminor will bump the version up to the next minor release, and immediately
+// down to pre-release. premajor and prepatch work the same way.
+SemVer.prototype.inc = function (release, identifier) {
+ switch (release) {
+ case 'premajor':
+ this.prerelease.length = 0
+ this.patch = 0
+ this.minor = 0
+ this.major++
+ this.inc('pre', identifier)
+ break
+ case 'preminor':
+ this.prerelease.length = 0
+ this.patch = 0
+ this.minor++
+ this.inc('pre', identifier)
+ break
+ case 'prepatch':
+ // If this is already a prerelease, it will bump to the next version
+ // drop any prereleases that might already exist, since they are not
+ // relevant at this point.
+ this.prerelease.length = 0
+ this.inc('patch', identifier)
+ this.inc('pre', identifier)
+ break
+ // If the input is a non-prerelease version, this acts the same as
+ // prepatch.
+ case 'prerelease':
+ if (this.prerelease.length === 0) {
+ this.inc('patch', identifier)
}
- } catch (er) {
- return this[MAKEFS](null, entry)
- }
- }
-
- [FILE] (entry) {
- const mode = entry.mode & 0o7777 || this.fmode
-
- const oner = er => {
- try { fs.closeSync(fd) } catch (_) {}
- if (er)
- this[ONERROR](er, entry)
- }
-
- let stream
- let fd
- try {
- fd = fs.openSync(entry.absolute, 'w', mode)
- } catch (er) {
- return oner(er)
- }
- const tx = this.transform ? this.transform(entry) || entry : entry
- if (tx !== entry) {
- tx.on('error', er => this[ONERROR](er, entry))
- entry.pipe(tx)
- }
+ this.inc('pre', identifier)
+ break
- tx.on('data', chunk => {
- try {
- fs.writeSync(fd, chunk, 0, chunk.length)
- } catch (er) {
- oner(er)
+ case 'major':
+ // If this is a pre-major version, bump up to the same major version.
+ // Otherwise increment major.
+ // 1.0.0-5 bumps to 1.0.0
+ // 1.1.0 bumps to 2.0.0
+ if (this.minor !== 0 ||
+ this.patch !== 0 ||
+ this.prerelease.length === 0) {
+ this.major++
}
- })
-
- tx.on('end', _ => {
- let er = null
- // try both, falling futimes back to utimes
- // if either fails, handle the first error
- if (entry.mtime && !this.noMtime) {
- const atime = entry.atime || new Date()
- const mtime = entry.mtime
- try {
- fs.futimesSync(fd, atime, mtime)
- } catch (futimeser) {
- try {
- fs.utimesSync(entry.absolute, atime, mtime)
- } catch (utimeser) {
- er = futimeser
+ this.minor = 0
+ this.patch = 0
+ this.prerelease = []
+ break
+ case 'minor':
+ // If this is a pre-minor version, bump up to the same minor version.
+ // Otherwise increment minor.
+ // 1.2.0-5 bumps to 1.2.0
+ // 1.2.1 bumps to 1.3.0
+ if (this.patch !== 0 || this.prerelease.length === 0) {
+ this.minor++
+ }
+ this.patch = 0
+ this.prerelease = []
+ break
+ case 'patch':
+ // If this is not a pre-release version, it will increment the patch.
+ // If it is a pre-release it will bump up to the same patch version.
+ // 1.2.0-5 patches to 1.2.0
+ // 1.2.0 patches to 1.2.1
+ if (this.prerelease.length === 0) {
+ this.patch++
+ }
+ this.prerelease = []
+ break
+ // This probably shouldn't be used publicly.
+ // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
+ case 'pre':
+ if (this.prerelease.length === 0) {
+ this.prerelease = [0]
+ } else {
+ var i = this.prerelease.length
+ while (--i >= 0) {
+ if (typeof this.prerelease[i] === 'number') {
+ this.prerelease[i]++
+ i = -2
}
}
+ if (i === -1) {
+ // didn't increment anything
+ this.prerelease.push(0)
+ }
}
-
- if (this[DOCHOWN](entry)) {
- const uid = this[UID](entry)
- const gid = this[GID](entry)
-
- try {
- fs.fchownSync(fd, uid, gid)
- } catch (fchowner) {
- try {
- fs.chownSync(entry.absolute, uid, gid)
- } catch (chowner) {
- er = er || fchowner
+ if (identifier) {
+ // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
+ // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
+ if (this.prerelease[0] === identifier) {
+ if (isNaN(this.prerelease[1])) {
+ this.prerelease = [identifier, 0]
}
+ } else {
+ this.prerelease = [identifier, 0]
}
}
+ break
- oner(er)
- })
+ default:
+ throw new Error('invalid increment argument: ' + release)
}
+ this.format()
+ this.raw = this.version
+ return this
+}
- [DIRECTORY] (entry) {
- const mode = entry.mode & 0o7777 || this.dmode
- const er = this[MKDIR](entry.absolute, mode)
- if (er)
- return this[ONERROR](er, entry)
- if (entry.mtime && !this.noMtime) {
- try {
- fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime)
- } catch (er) {}
- }
- if (this[DOCHOWN](entry)) {
- try {
- fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))
- } catch (er) {}
- }
- entry.resume()
+exports.inc = inc
+function inc (version, release, loose, identifier) {
+ if (typeof (loose) === 'string') {
+ identifier = loose
+ loose = undefined
}
- [MKDIR] (dir, mode) {
- try {
- return mkdir.sync(dir, {
- uid: this.uid,
- gid: this.gid,
- processUid: this.processUid,
- processGid: this.processGid,
- umask: this.processUmask,
- preserve: this.preservePaths,
- unlink: this.unlink,
- cache: this.dirCache,
- cwd: this.cwd,
- mode: mode
- })
- } catch (er) {
- return er
- }
+ try {
+ return new SemVer(version, loose).inc(release, identifier).version
+ } catch (er) {
+ return null
}
+}
- [LINK] (entry, linkpath, link) {
- try {
- fs[link + 'Sync'](linkpath, entry.absolute)
- entry.resume()
- } catch (er) {
- return this[ONERROR](er, entry)
+exports.diff = diff
+function diff (version1, version2) {
+ if (eq(version1, version2)) {
+ return null
+ } else {
+ var v1 = parse(version1)
+ var v2 = parse(version2)
+ var prefix = ''
+ if (v1.prerelease.length || v2.prerelease.length) {
+ prefix = 'pre'
+ var defaultResult = 'prerelease'
+ }
+ for (var key in v1) {
+ if (key === 'major' || key === 'minor' || key === 'patch') {
+ if (v1[key] !== v2[key]) {
+ return prefix + key
+ }
+ }
}
+ return defaultResult // may be undefined
}
}
-Unpack.Sync = UnpackSync
-module.exports = Unpack
+exports.compareIdentifiers = compareIdentifiers
+var numeric = /^[0-9]+$/
+function compareIdentifiers (a, b) {
+ var anum = numeric.test(a)
+ var bnum = numeric.test(b)
-/***/ }),
-/* 64 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ if (anum && bnum) {
+ a = +a
+ b = +b
+ }
-"use strict";
+ return a === b ? 0
+ : (anum && !bnum) ? -1
+ : (bnum && !anum) ? 1
+ : a < b ? -1
+ : 1
+}
-const path = __webpack_require__(622);
-const os = __webpack_require__(87);
+exports.rcompareIdentifiers = rcompareIdentifiers
+function rcompareIdentifiers (a, b) {
+ return compareIdentifiers(b, a)
+}
-const homedir = os.homedir();
-const tmpdir = os.tmpdir();
-const {env} = process;
+exports.major = major
+function major (a, loose) {
+ return new SemVer(a, loose).major
+}
-const macos = name => {
- const library = path.join(homedir, 'Library');
+exports.minor = minor
+function minor (a, loose) {
+ return new SemVer(a, loose).minor
+}
- return {
- data: path.join(library, 'Application Support', name),
- config: path.join(library, 'Preferences', name),
- cache: path.join(library, 'Caches', name),
- log: path.join(library, 'Logs', name),
- temp: path.join(tmpdir, name)
- };
-};
+exports.patch = patch
+function patch (a, loose) {
+ return new SemVer(a, loose).patch
+}
-const windows = name => {
- const appData = env.APPDATA || path.join(homedir, 'AppData', 'Roaming');
- const localAppData = env.LOCALAPPDATA || path.join(homedir, 'AppData', 'Local');
+exports.compare = compare
+function compare (a, b, loose) {
+ return new SemVer(a, loose).compare(new SemVer(b, loose))
+}
- return {
- // Data/config/cache/log are invented by me as Windows isn't opinionated about this
- data: path.join(localAppData, name, 'Data'),
- config: path.join(appData, name, 'Config'),
- cache: path.join(localAppData, name, 'Cache'),
- log: path.join(localAppData, name, 'Log'),
- temp: path.join(tmpdir, name)
- };
-};
+exports.compareLoose = compareLoose
+function compareLoose (a, b) {
+ return compare(a, b, true)
+}
-// https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
-const linux = name => {
- const username = path.basename(homedir);
+exports.rcompare = rcompare
+function rcompare (a, b, loose) {
+ return compare(b, a, loose)
+}
- return {
- data: path.join(env.XDG_DATA_HOME || path.join(homedir, '.local', 'share'), name),
- config: path.join(env.XDG_CONFIG_HOME || path.join(homedir, '.config'), name),
- cache: path.join(env.XDG_CACHE_HOME || path.join(homedir, '.cache'), name),
- // https://wiki.debian.org/XDGBaseDirectorySpecification#state
- log: path.join(env.XDG_STATE_HOME || path.join(homedir, '.local', 'state'), name),
- temp: path.join(tmpdir, username, name)
- };
-};
+exports.sort = sort
+function sort (list, loose) {
+ return list.sort(function (a, b) {
+ return exports.compare(a, b, loose)
+ })
+}
-const envPaths = (name, options) => {
- if (typeof name !== 'string') {
- throw new TypeError(`Expected string, got ${typeof name}`);
- }
+exports.rsort = rsort
+function rsort (list, loose) {
+ return list.sort(function (a, b) {
+ return exports.rcompare(a, b, loose)
+ })
+}
- options = Object.assign({suffix: 'nodejs'}, options);
+exports.gt = gt
+function gt (a, b, loose) {
+ return compare(a, b, loose) > 0
+}
- if (options.suffix) {
- // Add suffix to prevent possible conflict with native apps
- name += `-${options.suffix}`;
- }
+exports.lt = lt
+function lt (a, b, loose) {
+ return compare(a, b, loose) < 0
+}
- if (process.platform === 'darwin') {
- return macos(name);
- }
+exports.eq = eq
+function eq (a, b, loose) {
+ return compare(a, b, loose) === 0
+}
- if (process.platform === 'win32') {
- return windows(name);
- }
+exports.neq = neq
+function neq (a, b, loose) {
+ return compare(a, b, loose) !== 0
+}
- return linux(name);
-};
+exports.gte = gte
+function gte (a, b, loose) {
+ return compare(a, b, loose) >= 0
+}
-module.exports = envPaths;
-// TODO: Remove this for the next major release
-module.exports.default = envPaths;
+exports.lte = lte
+function lte (a, b, loose) {
+ return compare(a, b, loose) <= 0
+}
+exports.cmp = cmp
+function cmp (a, op, b, loose) {
+ switch (op) {
+ case '===':
+ if (typeof a === 'object')
+ a = a.version
+ if (typeof b === 'object')
+ b = b.version
+ return a === b
-/***/ }),
-/* 65 */
-/***/ (function(module) {
+ case '!==':
+ if (typeof a === 'object')
+ a = a.version
+ if (typeof b === 'object')
+ b = b.version
+ return a !== b
-// Generated by CoffeeScript 1.12.7
-(function() {
- module.exports = {
- Disconnected: 1,
- Preceding: 2,
- Following: 4,
- Contains: 8,
- ContainedBy: 16,
- ImplementationSpecific: 32
- };
+ case '':
+ case '=':
+ case '==':
+ return eq(a, b, loose)
-}).call(this);
+ case '!=':
+ return neq(a, b, loose)
+ case '>':
+ return gt(a, b, loose)
-/***/ }),
-/* 66 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ case '>=':
+ return gte(a, b, loose)
-"use strict";
+ case '<':
+ return lt(a, b, loose)
-var stripAnsi = __webpack_require__(774);
-var codePointAt = __webpack_require__(995);
-var isFullwidthCodePoint = __webpack_require__(61);
+ case '<=':
+ return lte(a, b, loose)
-// https://github.com/nodejs/io.js/blob/cff7300a578be1b10001f2d967aaedc88aee6402/lib/readline.js#L1345
-module.exports = function (str) {
- if (typeof str !== 'string' || str.length === 0) {
- return 0;
- }
-
- var width = 0;
-
- str = stripAnsi(str);
-
- for (var i = 0; i < str.length; i++) {
- var code = codePointAt(str, i);
-
- // ignore control characters
- if (code <= 0x1f || (code >= 0x7f && code <= 0x9f)) {
- continue;
- }
-
- // surrogates
- if (code >= 0x10000) {
- i++;
- }
-
- if (isFullwidthCodePoint(code)) {
- width += 2;
- } else {
- width++;
- }
- }
-
- return width;
-};
-
-
-/***/ }),
-/* 67 */,
-/* 68 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-
-/**
- * headers.js
- *
- * Headers class offers convenient helpers
- */
-
-const common = __webpack_require__(477)
-const checkInvalidHeaderChar = common.checkInvalidHeaderChar
-const checkIsHttpToken = common.checkIsHttpToken
-
-function sanitizeName (name) {
- name += ''
- if (!checkIsHttpToken(name)) {
- throw new TypeError(`${name} is not a legal HTTP header name`)
- }
- return name.toLowerCase()
-}
-
-function sanitizeValue (value) {
- value += ''
- if (checkInvalidHeaderChar(value)) {
- throw new TypeError(`${value} is not a legal HTTP header value`)
+ default:
+ throw new TypeError('Invalid operator: ' + op)
}
- return value
}
-const MAP = Symbol('map')
-class Headers {
- /**
- * Headers class
- *
- * @param Object headers Response headers
- * @return Void
- */
- constructor (init) {
- this[MAP] = Object.create(null)
-
- if (init instanceof Headers) {
- const rawHeaders = init.raw()
- const headerNames = Object.keys(rawHeaders)
-
- for (const headerName of headerNames) {
- for (const value of rawHeaders[headerName]) {
- this.append(headerName, value)
- }
- }
-
- return
+exports.Comparator = Comparator
+function Comparator (comp, options) {
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
}
+ }
- // We don't worry about converting prop to ByteString here as append()
- // will handle it.
- if (init == null) {
- // no op
- } else if (typeof init === 'object') {
- const method = init[Symbol.iterator]
- if (method != null) {
- if (typeof method !== 'function') {
- throw new TypeError('Header pairs must be iterable')
- }
-
- // sequence>
- // Note: per spec we have to first exhaust the lists then process them
- const pairs = []
- for (const pair of init) {
- if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
- throw new TypeError('Each header pair must be iterable')
- }
- pairs.push(Array.from(pair))
- }
-
- for (const pair of pairs) {
- if (pair.length !== 2) {
- throw new TypeError('Each header pair must be a name/value tuple')
- }
- this.append(pair[0], pair[1])
- }
- } else {
- // record
- for (const key of Object.keys(init)) {
- const value = init[key]
- this.append(key, value)
- }
- }
+ if (comp instanceof Comparator) {
+ if (comp.loose === !!options.loose) {
+ return comp
} else {
- throw new TypeError('Provided initializer must be an object')
+ comp = comp.value
}
+ }
- Object.defineProperty(this, Symbol.toStringTag, {
- value: 'Headers',
- writable: false,
- enumerable: false,
- configurable: true
- })
+ if (!(this instanceof Comparator)) {
+ return new Comparator(comp, options)
}
- /**
- * Return first header value given name
- *
- * @param String name Header name
- * @return Mixed
- */
- get (name) {
- const list = this[MAP][sanitizeName(name)]
- if (!list) {
- return null
- }
+ debug('comparator', comp, options)
+ this.options = options
+ this.loose = !!options.loose
+ this.parse(comp)
- return list.join(', ')
+ if (this.semver === ANY) {
+ this.value = ''
+ } else {
+ this.value = this.operator + this.semver.version
}
- /**
- * Iterate over all headers
- *
- * @param Function callback Executed for each item with parameters (value, name, thisArg)
- * @param Boolean thisArg `this` context for callback function
- * @return Void
- */
- forEach (callback, thisArg) {
- let pairs = getHeaderPairs(this)
- let i = 0
- while (i < pairs.length) {
- const name = pairs[i][0]
- const value = pairs[i][1]
- callback.call(thisArg, value, name, this)
- pairs = getHeaderPairs(this)
- i++
- }
- }
+ debug('comp', this)
+}
- /**
- * Overwrite header values given name
- *
- * @param String name Header name
- * @param String value Header value
- * @return Void
- */
- set (name, value) {
- this[MAP][sanitizeName(name)] = [sanitizeValue(value)]
- }
+var ANY = {}
+Comparator.prototype.parse = function (comp) {
+ var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
+ var m = comp.match(r)
- /**
- * Append a value onto existing header
- *
- * @param String name Header name
- * @param String value Header value
- * @return Void
- */
- append (name, value) {
- if (!this.has(name)) {
- this.set(name, value)
- return
- }
+ if (!m) {
+ throw new TypeError('Invalid comparator: ' + comp)
+ }
- this[MAP][sanitizeName(name)].push(sanitizeValue(value))
+ this.operator = m[1]
+ if (this.operator === '=') {
+ this.operator = ''
}
- /**
- * Check for header name existence
- *
- * @param String name Header name
- * @return Boolean
- */
- has (name) {
- return !!this[MAP][sanitizeName(name)]
+ // if it literally is just '>' or '' then allow anything.
+ if (!m[2]) {
+ this.semver = ANY
+ } else {
+ this.semver = new SemVer(m[2], this.options.loose)
}
+}
- /**
- * Delete all header values given name
- *
- * @param String name Header name
- * @return Void
- */
- delete (name) {
- delete this[MAP][sanitizeName(name)]
- };
+Comparator.prototype.toString = function () {
+ return this.value
+}
- /**
- * Return raw headers (non-spec api)
- *
- * @return Object
- */
- raw () {
- return this[MAP]
- }
+Comparator.prototype.test = function (version) {
+ debug('Comparator.test', version, this.options.loose)
- /**
- * Get an iterator on keys.
- *
- * @return Iterator
- */
- keys () {
- return createHeadersIterator(this, 'key')
+ if (this.semver === ANY) {
+ return true
}
- /**
- * Get an iterator on values.
- *
- * @return Iterator
- */
- values () {
- return createHeadersIterator(this, 'value')
+ if (typeof version === 'string') {
+ version = new SemVer(version, this.options)
}
- /**
- * Get an iterator on entries.
- *
- * This is the default iterator of the Headers object.
- *
- * @return Iterator
- */
- [Symbol.iterator] () {
- return createHeadersIterator(this, 'key+value')
- }
+ return cmp(version, this.operator, this.semver, this.options)
}
-Headers.prototype.entries = Headers.prototype[Symbol.iterator]
-Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
- value: 'HeadersPrototype',
- writable: false,
- enumerable: false,
- configurable: true
-})
+Comparator.prototype.intersects = function (comp, options) {
+ if (!(comp instanceof Comparator)) {
+ throw new TypeError('a Comparator is required')
+ }
-function getHeaderPairs (headers, kind) {
- const keys = Object.keys(headers[MAP]).sort()
- return keys.map(
- kind === 'key'
- ? k => [k]
- : k => [k, headers.get(k)]
- )
-}
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
+ }
+ }
-const INTERNAL = Symbol('internal')
+ var rangeTmp
-function createHeadersIterator (target, kind) {
- const iterator = Object.create(HeadersIteratorPrototype)
- iterator[INTERNAL] = {
- target,
- kind,
- index: 0
+ if (this.operator === '') {
+ rangeTmp = new Range(comp.value, options)
+ return satisfies(this.value, rangeTmp, options)
+ } else if (comp.operator === '') {
+ rangeTmp = new Range(this.value, options)
+ return satisfies(comp.semver, rangeTmp, options)
}
- return iterator
-}
-const HeadersIteratorPrototype = Object.setPrototypeOf({
- next () {
- // istanbul ignore if
- if (!this ||
- Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
- throw new TypeError('Value of `this` is not a HeadersIterator')
- }
+ var sameDirectionIncreasing =
+ (this.operator === '>=' || this.operator === '>') &&
+ (comp.operator === '>=' || comp.operator === '>')
+ var sameDirectionDecreasing =
+ (this.operator === '<=' || this.operator === '<') &&
+ (comp.operator === '<=' || comp.operator === '<')
+ var sameSemVer = this.semver.version === comp.semver.version
+ var differentDirectionsInclusive =
+ (this.operator === '>=' || this.operator === '<=') &&
+ (comp.operator === '>=' || comp.operator === '<=')
+ var oppositeDirectionsLessThan =
+ cmp(this.semver, '<', comp.semver, options) &&
+ ((this.operator === '>=' || this.operator === '>') &&
+ (comp.operator === '<=' || comp.operator === '<'))
+ var oppositeDirectionsGreaterThan =
+ cmp(this.semver, '>', comp.semver, options) &&
+ ((this.operator === '<=' || this.operator === '<') &&
+ (comp.operator === '>=' || comp.operator === '>'))
- const target = this[INTERNAL].target
- const kind = this[INTERNAL].kind
- const index = this[INTERNAL].index
- const values = getHeaderPairs(target, kind)
- const len = values.length
- if (index >= len) {
- return {
- value: undefined,
- done: true
- }
- }
+ return sameDirectionIncreasing || sameDirectionDecreasing ||
+ (sameSemVer && differentDirectionsInclusive) ||
+ oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
+}
- const pair = values[index]
- this[INTERNAL].index = index + 1
+exports.Range = Range
+function Range (range, options) {
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
+ }
+ }
- let result
- if (kind === 'key') {
- result = pair[0]
- } else if (kind === 'value') {
- result = pair[1]
+ if (range instanceof Range) {
+ if (range.loose === !!options.loose &&
+ range.includePrerelease === !!options.includePrerelease) {
+ return range
} else {
- result = pair
+ return new Range(range.raw, options)
}
+ }
- return {
- value: result,
- done: false
- }
+ if (range instanceof Comparator) {
+ return new Range(range.value, options)
}
-}, Object.getPrototypeOf(
- Object.getPrototypeOf([][Symbol.iterator]())
-))
-Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
- value: 'HeadersIterator',
- writable: false,
- enumerable: false,
- configurable: true
-})
+ if (!(this instanceof Range)) {
+ return new Range(range, options)
+ }
-module.exports = Headers
+ this.options = options
+ this.loose = !!options.loose
+ this.includePrerelease = !!options.includePrerelease
+ // First, split based on boolean or ||
+ this.raw = range
+ this.set = range.split(/\s*\|\|\s*/).map(function (range) {
+ return this.parseRange(range.trim())
+ }, this).filter(function (c) {
+ // throw out any that are not relevant for whatever reason
+ return c.length
+ })
-/***/ }),
-/* 69 */
-/***/ (function(module) {
+ if (!this.set.length) {
+ throw new TypeError('Invalid SemVer Range: ' + range)
+ }
-// populates missing values
-module.exports = function(dst, src) {
+ this.format()
+}
- Object.keys(src).forEach(function(prop)
- {
- dst[prop] = dst[prop] || src[prop];
- });
+Range.prototype.format = function () {
+ this.range = this.set.map(function (comps) {
+ return comps.join(' ').trim()
+ }).join('||').trim()
+ return this.range
+}
- return dst;
-};
+Range.prototype.toString = function () {
+ return this.range
+}
+Range.prototype.parseRange = function (range) {
+ var loose = this.options.loose
+ range = range.trim()
+ // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
+ var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]
+ range = range.replace(hr, hyphenReplace)
+ debug('hyphen replace', range)
+ // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
+ range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace)
+ debug('comparator trim', range, re[COMPARATORTRIM])
-/***/ }),
-/* 70 */
-/***/ (function(__unusedmodule, exports) {
+ // `~ 1.2.3` => `~1.2.3`
+ range = range.replace(re[TILDETRIM], tildeTrimReplace)
-"use strict";
+ // `^ 1.2.3` => `^1.2.3`
+ range = range.replace(re[CARETTRIM], caretTrimReplace)
-Object.defineProperty(exports, "__esModule", { value: true });
-//# sourceMappingURL=trace_state.js.map
+ // normalize spaces
+ range = range.split(/\s+/).join(' ')
-/***/ }),
-/* 71 */
-/***/ (function() {
+ // At this point, the range is completely trimmed and
+ // ready to be split into comparators.
-"use strict";
+ var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
+ var set = range.split(' ').map(function (comp) {
+ return parseComparator(comp, this.options)
+ }, this).join(' ').split(/\s+/)
+ if (this.options.loose) {
+ // in loose mode, throw out any that are not valid comparators
+ set = set.filter(function (comp) {
+ return !!comp.match(compRe)
+ })
+ }
+ set = set.map(function (comp) {
+ return new Comparator(comp, this.options)
+ }, this)
-if (typeof Symbol === undefined || !Symbol.asyncIterator) {
- Symbol.asyncIterator = Symbol.for("Symbol.asyncIterator");
+ return set
}
-//# sourceMappingURL=index.js.map
-
-/***/ }),
-/* 72 */
-/***/ (function(module) {
-"use strict";
+Range.prototype.intersects = function (range, options) {
+ if (!(range instanceof Range)) {
+ throw new TypeError('a Range is required')
+ }
+ return this.set.some(function (thisComparators) {
+ return thisComparators.every(function (thisComparator) {
+ return range.set.some(function (rangeComparators) {
+ return rangeComparators.every(function (rangeComparator) {
+ return thisComparator.intersects(rangeComparator, options)
+ })
+ })
+ })
+ })
+}
-const pTry = (fn, ...arguments_) => new Promise(resolve => {
- resolve(fn(...arguments_));
-});
+// Mostly just for testing and legacy API reasons
+exports.toComparators = toComparators
+function toComparators (range, options) {
+ return new Range(range, options).set.map(function (comp) {
+ return comp.map(function (c) {
+ return c.value
+ }).join(' ').trim().split(' ')
+ })
+}
-module.exports = pTry;
-// TODO: remove this in the next major version
-module.exports.default = pTry;
-
-
-/***/ }),
-/* 73 */,
-/* 74 */,
-/* 75 */,
-/* 76 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-var Buffer = __webpack_require__(254).Buffer;
-var util = __webpack_require__(669);
-
-function copyBuffer(src, target, offset) {
- src.copy(target, offset);
+// comprised of xranges, tildes, stars, and gtlt's at this point.
+// already replaced the hyphen ranges
+// turn into a set of JUST comparators.
+function parseComparator (comp, options) {
+ debug('comp', comp, options)
+ comp = replaceCarets(comp, options)
+ debug('caret', comp)
+ comp = replaceTildes(comp, options)
+ debug('tildes', comp)
+ comp = replaceXRanges(comp, options)
+ debug('xrange', comp)
+ comp = replaceStars(comp, options)
+ debug('stars', comp)
+ return comp
}
-module.exports = function () {
- function BufferList() {
- _classCallCheck(this, BufferList);
+function isX (id) {
+ return !id || id.toLowerCase() === 'x' || id === '*'
+}
- this.head = null;
- this.tail = null;
- this.length = 0;
- }
+// ~, ~> --> * (any, kinda silly)
+// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
+// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
+// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
+// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
+// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
+function replaceTildes (comp, options) {
+ return comp.trim().split(/\s+/).map(function (comp) {
+ return replaceTilde(comp, options)
+ }).join(' ')
+}
- BufferList.prototype.push = function push(v) {
- var entry = { data: v, next: null };
- if (this.length > 0) this.tail.next = entry;else this.head = entry;
- this.tail = entry;
- ++this.length;
- };
+function replaceTilde (comp, options) {
+ var r = options.loose ? re[TILDELOOSE] : re[TILDE]
+ return comp.replace(r, function (_, M, m, p, pr) {
+ debug('tilde', comp, _, M, m, p, pr)
+ var ret
- BufferList.prototype.unshift = function unshift(v) {
- var entry = { data: v, next: this.head };
- if (this.length === 0) this.tail = entry;
- this.head = entry;
- ++this.length;
- };
+ if (isX(M)) {
+ ret = ''
+ } else if (isX(m)) {
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
+ } else if (isX(p)) {
+ // ~1.2 == >=1.2.0 <1.3.0
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
+ } else if (pr) {
+ debug('replaceTilde pr', pr)
+ ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+ ' <' + M + '.' + (+m + 1) + '.0'
+ } else {
+ // ~1.2.3 == >=1.2.3 <1.3.0
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + (+m + 1) + '.0'
+ }
- BufferList.prototype.shift = function shift() {
- if (this.length === 0) return;
- var ret = this.head.data;
- if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
- --this.length;
- return ret;
- };
+ debug('tilde return', ret)
+ return ret
+ })
+}
- BufferList.prototype.clear = function clear() {
- this.head = this.tail = null;
- this.length = 0;
- };
+// ^ --> * (any, kinda silly)
+// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
+// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
+// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
+// ^1.2.3 --> >=1.2.3 <2.0.0
+// ^1.2.0 --> >=1.2.0 <2.0.0
+function replaceCarets (comp, options) {
+ return comp.trim().split(/\s+/).map(function (comp) {
+ return replaceCaret(comp, options)
+ }).join(' ')
+}
- BufferList.prototype.join = function join(s) {
- if (this.length === 0) return '';
- var p = this.head;
- var ret = '' + p.data;
- while (p = p.next) {
- ret += s + p.data;
- }return ret;
- };
+function replaceCaret (comp, options) {
+ debug('caret', comp, options)
+ var r = options.loose ? re[CARETLOOSE] : re[CARET]
+ return comp.replace(r, function (_, M, m, p, pr) {
+ debug('caret', comp, _, M, m, p, pr)
+ var ret
- BufferList.prototype.concat = function concat(n) {
- if (this.length === 0) return Buffer.alloc(0);
- if (this.length === 1) return this.head.data;
- var ret = Buffer.allocUnsafe(n >>> 0);
- var p = this.head;
- var i = 0;
- while (p) {
- copyBuffer(p.data, ret, i);
- i += p.data.length;
- p = p.next;
+ if (isX(M)) {
+ ret = ''
+ } else if (isX(m)) {
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
+ } else if (isX(p)) {
+ if (M === '0') {
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
+ } else {
+ ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
+ }
+ } else if (pr) {
+ debug('replaceCaret pr', pr)
+ if (M === '0') {
+ if (m === '0') {
+ ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+ ' <' + M + '.' + m + '.' + (+p + 1)
+ } else {
+ ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+ ' <' + M + '.' + (+m + 1) + '.0'
+ }
+ } else {
+ ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
+ ' <' + (+M + 1) + '.0.0'
+ }
+ } else {
+ debug('no pr')
+ if (M === '0') {
+ if (m === '0') {
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + m + '.' + (+p + 1)
+ } else {
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + M + '.' + (+m + 1) + '.0'
+ }
+ } else {
+ ret = '>=' + M + '.' + m + '.' + p +
+ ' <' + (+M + 1) + '.0.0'
+ }
}
- return ret;
- };
- return BufferList;
-}();
-
-if (util && util.inspect && util.inspect.custom) {
- module.exports.prototype[util.inspect.custom] = function () {
- var obj = util.inspect({ length: this.length });
- return this.constructor.name + ' ' + obj;
- };
+ debug('caret return', ret)
+ return ret
+ })
}
-/***/ }),
-/* 77 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+function replaceXRanges (comp, options) {
+ debug('replaceXRanges', comp, options)
+ return comp.split(/\s+/).map(function (comp) {
+ return replaceXRange(comp, options)
+ }).join(' ')
+}
-"use strict";
+function replaceXRange (comp, options) {
+ comp = comp.trim()
+ var r = options.loose ? re[XRANGELOOSE] : re[XRANGE]
+ return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
+ debug('xRange', comp, ret, gtlt, M, m, p, pr)
+ var xM = isX(M)
+ var xm = xM || isX(m)
+ var xp = xm || isX(p)
+ var anyX = xp
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ContextAPI = void 0;
-var context_base_1 = __webpack_require__(459);
-var global_utils_1 = __webpack_require__(976);
-var NOOP_CONTEXT_MANAGER = new context_base_1.NoopContextManager();
-/**
- * Singleton object which represents the entry point to the OpenTelemetry Context API
- */
-var ContextAPI = /** @class */ (function () {
- /** Empty private constructor prevents end users from constructing a new instance of the API */
- function ContextAPI() {
+ if (gtlt === '=' && anyX) {
+ gtlt = ''
}
- /** Get the singleton instance of the Context API */
- ContextAPI.getInstance = function () {
- if (!this._instance) {
- this._instance = new ContextAPI();
+
+ if (xM) {
+ if (gtlt === '>' || gtlt === '<') {
+ // nothing is allowed
+ ret = '<0.0.0'
+ } else {
+ // nothing is forbidden
+ ret = '*'
+ }
+ } else if (gtlt && anyX) {
+ // we know patch is an x, because we have any x at all.
+ // replace X with 0
+ if (xm) {
+ m = 0
+ }
+ p = 0
+
+ if (gtlt === '>') {
+ // >1 => >=2.0.0
+ // >1.2 => >=1.3.0
+ // >1.2.3 => >= 1.2.4
+ gtlt = '>='
+ if (xm) {
+ M = +M + 1
+ m = 0
+ p = 0
+ } else {
+ m = +m + 1
+ p = 0
}
- return this._instance;
- };
- /**
- * Set the current context manager. Returns the initialized context manager
- */
- ContextAPI.prototype.setGlobalContextManager = function (contextManager) {
- if (global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY]) {
- // global context manager has already been set
- return this._getContextManager();
+ } else if (gtlt === '<=') {
+ // <=0.7.x is actually <0.8.0, since any 0.7.x should
+ // pass. Similarly, <=7.x is actually <8.0.0, etc.
+ gtlt = '<'
+ if (xm) {
+ M = +M + 1
+ } else {
+ m = +m + 1
}
- global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY] = global_utils_1.makeGetter(global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION, contextManager, NOOP_CONTEXT_MANAGER);
- return contextManager;
- };
- /**
- * Get the currently active context
- */
- ContextAPI.prototype.active = function () {
- return this._getContextManager().active();
- };
- /**
- * Execute a function with an active context
- *
- * @param context context to be active during function execution
- * @param fn function to execute in a context
- */
- ContextAPI.prototype.with = function (context, fn) {
- return this._getContextManager().with(context, fn);
- };
- /**
- * Bind a context to a target function or event emitter
- *
- * @param target function or event emitter to bind
- * @param context context to bind to the event emitter or function. Defaults to the currently active context
- */
- ContextAPI.prototype.bind = function (target, context) {
- if (context === void 0) { context = this.active(); }
- return this._getContextManager().bind(target, context);
- };
- ContextAPI.prototype._getContextManager = function () {
- var _a, _b;
- return ((_b = (_a = global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY]) === null || _a === void 0 ? void 0 : _a.call(global_utils_1._global, global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION)) !== null && _b !== void 0 ? _b : NOOP_CONTEXT_MANAGER);
- };
- /** Disable and remove the global context manager */
- ContextAPI.prototype.disable = function () {
- this._getContextManager().disable();
- delete global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY];
- };
- return ContextAPI;
-}());
-exports.ContextAPI = ContextAPI;
-//# sourceMappingURL=context.js.map
-
-/***/ }),
-/* 78 */,
-/* 79 */,
-/* 80 */
-/***/ (function(module) {
+ }
-"use strict";
+ ret = gtlt + M + '.' + m + '.' + p
+ } else if (xm) {
+ ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
+ } else if (xp) {
+ ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
+ }
+ debug('xRange return', ret)
-module.exports = parseJson
-function parseJson (txt, reviver, context) {
- context = context || 20
- try {
- return JSON.parse(txt, reviver)
- } catch (e) {
- if (typeof txt !== 'string') {
- const isEmptyArray = Array.isArray(txt) && txt.length === 0
- const errorMessage = 'Cannot parse ' +
- (isEmptyArray ? 'an empty array' : String(txt))
- throw new TypeError(errorMessage)
- }
- const syntaxErr = e.message.match(/^Unexpected token.*position\s+(\d+)/i)
- const errIdx = syntaxErr
- ? +syntaxErr[1]
- : e.message.match(/^Unexpected end of JSON.*/i)
- ? txt.length - 1
- : null
- if (errIdx != null) {
- const start = errIdx <= context
- ? 0
- : errIdx - context
- const end = errIdx + context >= txt.length
- ? txt.length
- : errIdx + context
- e.message += ` while parsing near '${
- start === 0 ? '' : '...'
- }${txt.slice(start, end)}${
- end === txt.length ? '' : '...'
- }'`
- } else {
- e.message += ` while parsing '${txt.slice(0, context * 2)}'`
- }
- throw e
- }
+ return ret
+ })
}
+// Because * is AND-ed with everything else in the comparator,
+// and '' means "any version", just remove the *s entirely.
+function replaceStars (comp, options) {
+ debug('replaceStars', comp, options)
+ // Looseness is ignored here. star is always as loose as it gets!
+ return comp.trim().replace(re[STAR], '')
+}
-/***/ }),
-/* 81 */,
-/* 82 */
-/***/ (function(__unusedmodule, exports) {
+// This function is passed to string.replace(re[HYPHENRANGE])
+// M, m, patch, prerelease, build
+// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
+// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
+// 1.2 - 3.4 => >=1.2.0 <3.5.0
+function hyphenReplace ($0,
+ from, fM, fm, fp, fpr, fb,
+ to, tM, tm, tp, tpr, tb) {
+ if (isX(fM)) {
+ from = ''
+ } else if (isX(fm)) {
+ from = '>=' + fM + '.0.0'
+ } else if (isX(fp)) {
+ from = '>=' + fM + '.' + fm + '.0'
+ } else {
+ from = '>=' + from
+ }
-"use strict";
+ if (isX(tM)) {
+ to = ''
+ } else if (isX(tm)) {
+ to = '<' + (+tM + 1) + '.0.0'
+ } else if (isX(tp)) {
+ to = '<' + tM + '.' + (+tm + 1) + '.0'
+ } else if (tpr) {
+ to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
+ } else {
+ to = '<=' + to
+ }
-// We use any as a valid input type
-/* eslint-disable @typescript-eslint/no-explicit-any */
-Object.defineProperty(exports, "__esModule", { value: true });
-/**
- * Sanitizes an input into a string so it can be passed into issueCommand safely
- * @param input input to sanitize into a string
- */
-function toCommandValue(input) {
- if (input === null || input === undefined) {
- return '';
- }
- else if (typeof input === 'string' || input instanceof String) {
- return input;
- }
- return JSON.stringify(input);
+ return (from + ' ' + to).trim()
}
-exports.toCommandValue = toCommandValue;
-//# sourceMappingURL=utils.js.map
-/***/ }),
-/* 83 */,
-/* 84 */,
-/* 85 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+// if ANY of the sets match ALL of its comparators, then pass
+Range.prototype.test = function (version) {
+ if (!version) {
+ return false
+ }
-"use strict";
+ if (typeof version === 'string') {
+ version = new SemVer(version, this.options)
+ }
+ for (var i = 0; i < this.set.length; i++) {
+ if (testSet(this.set[i], version, this.options)) {
+ return true
+ }
+ }
+ return false
+}
-module.exports = __webpack_require__(260)
+function testSet (set, version, options) {
+ for (var i = 0; i < set.length; i++) {
+ if (!set[i].test(version)) {
+ return false
+ }
+ }
+ if (version.prerelease.length && !options.includePrerelease) {
+ // Find the set of versions that are allowed to have prereleases
+ // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
+ // That should allow `1.2.3-pr.2` to pass.
+ // However, `1.2.4-alpha.notready` should NOT be allowed,
+ // even though it's within the range set by the comparators.
+ for (i = 0; i < set.length; i++) {
+ debug(set[i].semver)
+ if (set[i].semver === ANY) {
+ continue
+ }
-/***/ }),
-/* 86 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ if (set[i].semver.prerelease.length > 0) {
+ var allowed = set[i].semver
+ if (allowed.major === version.major &&
+ allowed.minor === version.minor &&
+ allowed.patch === version.patch) {
+ return true
+ }
+ }
+ }
-var rng = __webpack_require__(139);
-var bytesToUuid = __webpack_require__(105);
+ // Version has a -pre, but it's not one of the ones we like.
+ return false
+ }
-// **`v1()` - Generate time-based UUID**
-//
-// Inspired by https://github.com/LiosK/UUID.js
-// and http://docs.python.org/library/uuid.html
+ return true
+}
-var _nodeId;
-var _clockseq;
+exports.satisfies = satisfies
+function satisfies (version, range, options) {
+ try {
+ range = new Range(range, options)
+ } catch (er) {
+ return false
+ }
+ return range.test(version)
+}
-// Previous uuid creation time
-var _lastMSecs = 0;
-var _lastNSecs = 0;
-
-// See https://github.com/uuidjs/uuid for API details
-function v1(options, buf, offset) {
- var i = buf && offset || 0;
- var b = buf || [];
-
- options = options || {};
- var node = options.node || _nodeId;
- var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq;
-
- // node and clockseq need to be initialized to random values if they're not
- // specified. We do this lazily to minimize issues related to insufficient
- // system entropy. See #189
- if (node == null || clockseq == null) {
- var seedBytes = rng();
- if (node == null) {
- // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
- node = _nodeId = [
- seedBytes[0] | 0x01,
- seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]
- ];
- }
- if (clockseq == null) {
- // Per 4.2.2, randomize (14 bit) clockseq
- clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
- }
+exports.maxSatisfying = maxSatisfying
+function maxSatisfying (versions, range, options) {
+ var max = null
+ var maxSV = null
+ try {
+ var rangeObj = new Range(range, options)
+ } catch (er) {
+ return null
}
+ versions.forEach(function (v) {
+ if (rangeObj.test(v)) {
+ // satisfies(v, range, options)
+ if (!max || maxSV.compare(v) === -1) {
+ // compare(max, v, true)
+ max = v
+ maxSV = new SemVer(max, options)
+ }
+ }
+ })
+ return max
+}
- // UUID timestamps are 100 nano-second units since the Gregorian epoch,
- // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
- // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
- // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
- var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime();
-
- // Per 4.2.1.2, use count of uuid's generated during the current clock
- // cycle to simulate higher resolution clock
- var nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1;
+exports.minSatisfying = minSatisfying
+function minSatisfying (versions, range, options) {
+ var min = null
+ var minSV = null
+ try {
+ var rangeObj = new Range(range, options)
+ } catch (er) {
+ return null
+ }
+ versions.forEach(function (v) {
+ if (rangeObj.test(v)) {
+ // satisfies(v, range, options)
+ if (!min || minSV.compare(v) === 1) {
+ // compare(min, v, true)
+ min = v
+ minSV = new SemVer(min, options)
+ }
+ }
+ })
+ return min
+}
- // Time since last uuid creation (in msecs)
- var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000;
+exports.minVersion = minVersion
+function minVersion (range, loose) {
+ range = new Range(range, loose)
- // Per 4.2.1.2, Bump clockseq on clock regression
- if (dt < 0 && options.clockseq === undefined) {
- clockseq = clockseq + 1 & 0x3fff;
+ var minver = new SemVer('0.0.0')
+ if (range.test(minver)) {
+ return minver
}
- // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
- // time interval
- if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
- nsecs = 0;
+ minver = new SemVer('0.0.0-0')
+ if (range.test(minver)) {
+ return minver
}
- // Per 4.2.1.2 Throw error if too many uuids are requested
- if (nsecs >= 10000) {
- throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec');
+ minver = null
+ for (var i = 0; i < range.set.length; ++i) {
+ var comparators = range.set[i]
+
+ comparators.forEach(function (comparator) {
+ // Clone to avoid manipulating the comparator's semver object.
+ var compver = new SemVer(comparator.semver.version)
+ switch (comparator.operator) {
+ case '>':
+ if (compver.prerelease.length === 0) {
+ compver.patch++
+ } else {
+ compver.prerelease.push(0)
+ }
+ compver.raw = compver.format()
+ /* fallthrough */
+ case '':
+ case '>=':
+ if (!minver || gt(minver, compver)) {
+ minver = compver
+ }
+ break
+ case '<':
+ case '<=':
+ /* Ignore maximum versions */
+ break
+ /* istanbul ignore next */
+ default:
+ throw new Error('Unexpected operation: ' + comparator.operator)
+ }
+ })
}
- _lastMSecs = msecs;
- _lastNSecs = nsecs;
- _clockseq = clockseq;
+ if (minver && range.test(minver)) {
+ return minver
+ }
- // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
- msecs += 12219292800000;
+ return null
+}
- // `time_low`
- var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
- b[i++] = tl >>> 24 & 0xff;
- b[i++] = tl >>> 16 & 0xff;
- b[i++] = tl >>> 8 & 0xff;
- b[i++] = tl & 0xff;
+exports.validRange = validRange
+function validRange (range, options) {
+ try {
+ // Return '*' instead of '' so that truthiness works.
+ // This will throw if it's invalid anyway
+ return new Range(range, options).range || '*'
+ } catch (er) {
+ return null
+ }
+}
- // `time_mid`
- var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff;
- b[i++] = tmh >>> 8 & 0xff;
- b[i++] = tmh & 0xff;
+// Determine if version is less than all the versions possible in the range
+exports.ltr = ltr
+function ltr (version, range, options) {
+ return outside(version, range, '<', options)
+}
- // `time_high_and_version`
- b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
- b[i++] = tmh >>> 16 & 0xff;
+// Determine if version is greater than all the versions possible in the range.
+exports.gtr = gtr
+function gtr (version, range, options) {
+ return outside(version, range, '>', options)
+}
- // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
- b[i++] = clockseq >>> 8 | 0x80;
+exports.outside = outside
+function outside (version, range, hilo, options) {
+ version = new SemVer(version, options)
+ range = new Range(range, options)
- // `clock_seq_low`
- b[i++] = clockseq & 0xff;
+ var gtfn, ltefn, ltfn, comp, ecomp
+ switch (hilo) {
+ case '>':
+ gtfn = gt
+ ltefn = lte
+ ltfn = lt
+ comp = '>'
+ ecomp = '>='
+ break
+ case '<':
+ gtfn = lt
+ ltefn = gte
+ ltfn = gt
+ comp = '<'
+ ecomp = '<='
+ break
+ default:
+ throw new TypeError('Must provide a hilo val of "<" or ">"')
+ }
- // `node`
- for (var n = 0; n < 6; ++n) {
- b[i + n] = node[n];
+ // If it satisifes the range it is not outside
+ if (satisfies(version, range, options)) {
+ return false
}
- return buf ? buf : bytesToUuid(b);
-}
+ // From now on, variable terms are as if we're in "gtr" mode.
+ // but note that everything is flipped for the "ltr" function.
-module.exports = v1;
+ for (var i = 0; i < range.set.length; ++i) {
+ var comparators = range.set[i]
+ var high = null
+ var low = null
-/***/ }),
-/* 87 */
-/***/ (function(module) {
+ comparators.forEach(function (comparator) {
+ if (comparator.semver === ANY) {
+ comparator = new Comparator('>=0.0.0')
+ }
+ high = high || comparator
+ low = low || comparator
+ if (gtfn(comparator.semver, high.semver, options)) {
+ high = comparator
+ } else if (ltfn(comparator.semver, low.semver, options)) {
+ low = comparator
+ }
+ })
-module.exports = require("os");
+ // If the edge version comparator has a operator then our version
+ // isn't outside it
+ if (high.operator === comp || high.operator === ecomp) {
+ return false
+ }
-/***/ }),
-/* 88 */,
-/* 89 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+ // If the lowest version comparator has an operator and our version
+ // is less than it then it isn't higher than the range
+ if ((!low.operator || low.operator === comp) &&
+ ltefn(version, low.semver)) {
+ return false
+ } else if (low.operator === ecomp && ltfn(version, low.semver)) {
+ return false
+ }
+ }
+ return true
+}
-"use strict";
-/*!
- * Copyright (c) 2015, Salesforce.com, Inc.
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- *
- * 3. Neither the name of Salesforce.com nor the names of its contributors may
- * be used to endorse or promote products derived from this software without
- * specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
- * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
- * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
- * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGE.
- */
+exports.prerelease = prerelease
+function prerelease (version, options) {
+ var parsed = parse(version, options)
+ return (parsed && parsed.prerelease.length) ? parsed.prerelease : null
+}
-const pubsuffix = __webpack_require__(562);
+exports.intersects = intersects
+function intersects (r1, r2, options) {
+ r1 = new Range(r1, options)
+ r2 = new Range(r2, options)
+ return r1.intersects(r2)
+}
-// Gives the permutation of all possible domainMatch()es of a given domain. The
-// array is in shortest-to-longest order. Handy for indexing.
-const SPECIAL_USE_DOMAINS = ["local"]; // RFC 6761
-function permuteDomain(domain, allowSpecialUseDomain) {
- let pubSuf = null;
- if (allowSpecialUseDomain) {
- const domainParts = domain.split(".");
- if (SPECIAL_USE_DOMAINS.includes(domainParts[domainParts.length - 1])) {
- pubSuf = `${domainParts[domainParts.length - 2]}.${
- domainParts[domainParts.length - 1]
- }`;
- } else {
- pubSuf = pubsuffix.getPublicSuffix(domain);
- }
- } else {
- pubSuf = pubsuffix.getPublicSuffix(domain);
+exports.coerce = coerce
+function coerce (version) {
+ if (version instanceof SemVer) {
+ return version
}
- if (!pubSuf) {
- return null;
- }
- if (pubSuf == domain) {
- return [domain];
+ if (typeof version !== 'string') {
+ return null
}
- const prefix = domain.slice(0, -(pubSuf.length + 1)); // ".example.com"
- const parts = prefix.split(".").reverse();
- let cur = pubSuf;
- const permutations = [cur];
- while (parts.length) {
- cur = `${parts.shift()}.${cur}`;
- permutations.push(cur);
+ var match = version.match(re[COERCE])
+
+ if (match == null) {
+ return null
}
- return permutations;
+
+ return parse(match[1] +
+ '.' + (match[2] || '0') +
+ '.' + (match[3] || '0'))
}
-exports.permuteDomain = permuteDomain;
+/***/ }),
+/* 58 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = __webpack_require__(600);
/***/ }),
-/* 90 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+/* 59 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-Object.defineProperty(exports, "__esModule", {
- value: true
-});
-exports.default = void 0;
+const BB = __webpack_require__(900)
-var _v = _interopRequireDefault(__webpack_require__(241));
+const cacache = __webpack_require__(426)
+const fetch = __webpack_require__(177)
+const fs = __webpack_require__(747)
+const npa = __webpack_require__(482)
+const optCheck = __webpack_require__(420)
+const path = __webpack_require__(622)
+const ssri = __webpack_require__(951)
+const retry = __webpack_require__(652)
-var _sha = _interopRequireDefault(__webpack_require__(616));
+const statAsync = BB.promisify(fs.stat)
-function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+const RETRIABLE_ERRORS = new Set(['ENOENT', 'EINTEGRITY', 'Z_DATA_ERROR'])
-const v5 = (0, _v.default)('v5', 0x50, _sha.default);
-var _default = v5;
-exports.default = _default;
+module.exports = withTarballStream
+function withTarballStream (spec, opts, streamHandler) {
+ opts = optCheck(opts)
+ spec = npa(spec, opts.where)
-/***/ }),
-/* 91 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ // First, we check for a file: resolved shortcut
+ const tryFile = (
+ !opts.preferOnline &&
+ opts.integrity &&
+ opts.resolved &&
+ opts.resolved.startsWith('file:')
+ )
+ ? BB.try(() => {
+ // NOTE - this is a special shortcut! Packages installed as files do not
+ // have a `resolved` field -- this specific case only occurs when you have,
+ // say, a git dependency or a registry dependency that you've packaged into
+ // a local file, and put that file: spec in the `resolved` field.
+ opts.log.silly('pacote', `trying ${spec} by local file: ${opts.resolved}`)
+ const file = path.resolve(opts.where || '.', opts.resolved.substr(5))
+ return statAsync(file)
+ .then(() => {
+ const verifier = ssri.integrityStream({ integrity: opts.integrity })
+ const stream = fs.createReadStream(file)
+ .on('error', err => verifier.emit('error', err))
+ .pipe(verifier)
+ return streamHandler(stream)
+ })
+ .catch(err => {
+ if (err.code === 'EINTEGRITY') {
+ opts.log.warn('pacote', `EINTEGRITY while extracting ${spec} from ${file}.You will have to recreate the file.`)
+ opts.log.verbose('pacote', `EINTEGRITY for ${spec}: ${err.message}`)
+ }
+ throw err
+ })
+ })
+ : BB.reject(Object.assign(new Error('no file!'), { code: 'ENOENT' }))
-var serialOrdered = __webpack_require__(892);
+ const tryDigest = tryFile
+ .catch(err => {
+ if (
+ opts.preferOnline ||
+ !opts.cache ||
+ !opts.integrity ||
+ !RETRIABLE_ERRORS.has(err.code)
+ ) {
+ throw err
+ } else {
+ opts.log.silly('tarball', `trying ${spec} by hash: ${opts.integrity}`)
+ const stream = cacache.get.stream.byDigest(
+ opts.cache, opts.integrity, opts
+ )
+ stream.once('error', err => stream.on('newListener', (ev, l) => {
+ if (ev === 'error') { l(err) }
+ }))
+ return streamHandler(stream)
+ .catch(err => {
+ if (err.code === 'EINTEGRITY' || err.code === 'Z_DATA_ERROR') {
+ opts.log.warn('tarball', `cached data for ${spec} (${opts.integrity}) seems to be corrupted. Refreshing cache.`)
+ return cleanUpCached(opts.cache, opts.integrity, opts)
+ .then(() => { throw err })
+ } else {
+ throw err
+ }
+ })
+ }
+ })
-// Public API
-module.exports = serial;
+ const trySpec = tryDigest
+ .catch(err => {
+ if (!RETRIABLE_ERRORS.has(err.code)) {
+ // If it's not one of our retriable errors, bail out and give up.
+ throw err
+ } else {
+ opts.log.silly(
+ 'tarball',
+ `no local data for ${spec}. Extracting by manifest.`
+ )
+ return BB.resolve(retry((tryAgain, attemptNum) => {
+ const tardata = fetch.tarball(spec, opts)
+ if (!opts.resolved) {
+ tardata.on('manifest', m => {
+ opts = opts.concat({ resolved: m._resolved })
+ })
+ tardata.on('integrity', i => {
+ opts = opts.concat({ integrity: i })
+ })
+ }
+ return BB.try(() => streamHandler(tardata))
+ .catch(err => {
+ // Retry once if we have a cache, to clear up any weird conditions.
+ // Don't retry network errors, though -- make-fetch-happen has already
+ // taken care of making sure we're all set on that front.
+ if (opts.cache && err.code && !String(err.code).match(/^E\d{3}$/)) {
+ if (err.code === 'EINTEGRITY' || err.code === 'Z_DATA_ERROR') {
+ opts.log.warn('tarball', `tarball data for ${spec} (${opts.integrity}) seems to be corrupted. Trying one more time.`)
+ }
+ return cleanUpCached(opts.cache, err.sri, opts)
+ .then(() => tryAgain(err))
+ } else {
+ throw err
+ }
+ })
+ }, { retries: 1 }))
+ }
+ })
-/**
- * Runs iterator over provided array elements in series
- *
- * @param {array|object} list - array or object (named list) to iterate over
- * @param {function} iterator - iterator to run
- * @param {function} callback - invoked when all elements processed
- * @returns {function} - jobs terminator
- */
-function serial(list, iterator, callback)
-{
- return serialOrdered(list, iterator, null, callback);
+ return trySpec
+ .catch(err => {
+ if (err.code === 'EINTEGRITY') {
+ err.message = `Verification failed while extracting ${spec}:\n${err.message}`
+ }
+ throw err
+ })
+}
+
+function cleanUpCached (cachePath, integrity, opts) {
+ return cacache.rm.content(cachePath, integrity, opts)
}
/***/ }),
-/* 92 */
+/* 60 */
/***/ (function(module, __unusedexports, __webpack_require__) {
-"use strict";
-
-
-// Description of supported double byte encodings and aliases.
-// Tables are not require()-d until they are needed to speed up library load.
-// require()-s are direct to support Browserify.
+// Update with any zlib constants that are added or changed in the future.
+// Node v6 didn't export this, so we just hard code the version and rely
+// on all the other hard-coded values from zlib v4736. When node v6
+// support drops, we can just export the realZlibConstants object.
+const realZlibConstants = __webpack_require__(761).constants ||
+ /* istanbul ignore next */ { ZLIB_VERNUM: 4736 }
-module.exports = {
-
- // == Japanese/ShiftJIS ====================================================
- // All japanese encodings are based on JIS X set of standards:
- // JIS X 0201 - Single-byte encoding of ASCII + ¥ + Kana chars at 0xA1-0xDF.
- // JIS X 0208 - Main set of 6879 characters, placed in 94x94 plane, to be encoded by 2 bytes.
- // Has several variations in 1978, 1983, 1990 and 1997.
- // JIS X 0212 - Supplementary plane of 6067 chars in 94x94 plane. 1990. Effectively dead.
- // JIS X 0213 - Extension and modern replacement of 0208 and 0212. Total chars: 11233.
- // 2 planes, first is superset of 0208, second - revised 0212.
- // Introduced in 2000, revised 2004. Some characters are in Unicode Plane 2 (0x2xxxx)
-
- // Byte encodings are:
- // * Shift_JIS: Compatible with 0201, uses not defined chars in top half as lead bytes for double-byte
- // encoding of 0208. Lead byte ranges: 0x81-0x9F, 0xE0-0xEF; Trail byte ranges: 0x40-0x7E, 0x80-0x9E, 0x9F-0xFC.
- // Windows CP932 is a superset of Shift_JIS. Some companies added more chars, notably KDDI.
- // * EUC-JP: Up to 3 bytes per character. Used mostly on *nixes.
- // 0x00-0x7F - lower part of 0201
- // 0x8E, 0xA1-0xDF - upper part of 0201
- // (0xA1-0xFE)x2 - 0208 plane (94x94).
- // 0x8F, (0xA1-0xFE)x2 - 0212 plane (94x94).
- // * JIS X 208: 7-bit, direct encoding of 0208. Byte ranges: 0x21-0x7E (94 values). Uncommon.
- // Used as-is in ISO2022 family.
- // * ISO2022-JP: Stateful encoding, with escape sequences to switch between ASCII,
- // 0201-1976 Roman, 0208-1978, 0208-1983.
- // * ISO2022-JP-1: Adds esc seq for 0212-1990.
- // * ISO2022-JP-2: Adds esc seq for GB2313-1980, KSX1001-1992, ISO8859-1, ISO8859-7.
- // * ISO2022-JP-3: Adds esc seq for 0201-1976 Kana set, 0213-2000 Planes 1, 2.
- // * ISO2022-JP-2004: Adds 0213-2004 Plane 1.
- //
- // After JIS X 0213 appeared, Shift_JIS-2004, EUC-JISX0213 and ISO2022-JP-2004 followed, with just changing the planes.
- //
- // Overall, it seems that it's a mess :( http://www8.plala.or.jp/tkubota1/unicode-symbols-map2.html
-
- 'shiftjis': {
- type: '_dbcs',
- table: function() { return __webpack_require__(546) },
- encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E},
- encodeSkipVals: [{from: 0xED40, to: 0xF940}],
- },
- 'csshiftjis': 'shiftjis',
- 'mskanji': 'shiftjis',
- 'sjis': 'shiftjis',
- 'windows31j': 'shiftjis',
- 'ms31j': 'shiftjis',
- 'xsjis': 'shiftjis',
- 'windows932': 'shiftjis',
- 'ms932': 'shiftjis',
- '932': 'shiftjis',
- 'cp932': 'shiftjis',
-
- 'eucjp': {
- type: '_dbcs',
- table: function() { return __webpack_require__(701) },
- encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E},
- },
-
- // TODO: KDDI extension to Shift_JIS
- // TODO: IBM CCSID 942 = CP932, but F0-F9 custom chars and other char changes.
- // TODO: IBM CCSID 943 = Shift_JIS = CP932 with original Shift_JIS lower 128 chars.
+module.exports = Object.freeze(Object.assign(Object.create(null), {
+ Z_NO_FLUSH: 0,
+ Z_PARTIAL_FLUSH: 1,
+ Z_SYNC_FLUSH: 2,
+ Z_FULL_FLUSH: 3,
+ Z_FINISH: 4,
+ Z_BLOCK: 5,
+ Z_OK: 0,
+ Z_STREAM_END: 1,
+ Z_NEED_DICT: 2,
+ Z_ERRNO: -1,
+ Z_STREAM_ERROR: -2,
+ Z_DATA_ERROR: -3,
+ Z_MEM_ERROR: -4,
+ Z_BUF_ERROR: -5,
+ Z_VERSION_ERROR: -6,
+ Z_NO_COMPRESSION: 0,
+ Z_BEST_SPEED: 1,
+ Z_BEST_COMPRESSION: 9,
+ Z_DEFAULT_COMPRESSION: -1,
+ Z_FILTERED: 1,
+ Z_HUFFMAN_ONLY: 2,
+ Z_RLE: 3,
+ Z_FIXED: 4,
+ Z_DEFAULT_STRATEGY: 0,
+ DEFLATE: 1,
+ INFLATE: 2,
+ GZIP: 3,
+ GUNZIP: 4,
+ DEFLATERAW: 5,
+ INFLATERAW: 6,
+ UNZIP: 7,
+ BROTLI_DECODE: 8,
+ BROTLI_ENCODE: 9,
+ Z_MIN_WINDOWBITS: 8,
+ Z_MAX_WINDOWBITS: 15,
+ Z_DEFAULT_WINDOWBITS: 15,
+ Z_MIN_CHUNK: 64,
+ Z_MAX_CHUNK: Infinity,
+ Z_DEFAULT_CHUNK: 16384,
+ Z_MIN_MEMLEVEL: 1,
+ Z_MAX_MEMLEVEL: 9,
+ Z_DEFAULT_MEMLEVEL: 8,
+ Z_MIN_LEVEL: -1,
+ Z_MAX_LEVEL: 9,
+ Z_DEFAULT_LEVEL: -1,
+ BROTLI_OPERATION_PROCESS: 0,
+ BROTLI_OPERATION_FLUSH: 1,
+ BROTLI_OPERATION_FINISH: 2,
+ BROTLI_OPERATION_EMIT_METADATA: 3,
+ BROTLI_MODE_GENERIC: 0,
+ BROTLI_MODE_TEXT: 1,
+ BROTLI_MODE_FONT: 2,
+ BROTLI_DEFAULT_MODE: 0,
+ BROTLI_MIN_QUALITY: 0,
+ BROTLI_MAX_QUALITY: 11,
+ BROTLI_DEFAULT_QUALITY: 11,
+ BROTLI_MIN_WINDOW_BITS: 10,
+ BROTLI_MAX_WINDOW_BITS: 24,
+ BROTLI_LARGE_MAX_WINDOW_BITS: 30,
+ BROTLI_DEFAULT_WINDOW: 22,
+ BROTLI_MIN_INPUT_BLOCK_BITS: 16,
+ BROTLI_MAX_INPUT_BLOCK_BITS: 24,
+ BROTLI_PARAM_MODE: 0,
+ BROTLI_PARAM_QUALITY: 1,
+ BROTLI_PARAM_LGWIN: 2,
+ BROTLI_PARAM_LGBLOCK: 3,
+ BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
+ BROTLI_PARAM_SIZE_HINT: 5,
+ BROTLI_PARAM_LARGE_WINDOW: 6,
+ BROTLI_PARAM_NPOSTFIX: 7,
+ BROTLI_PARAM_NDIRECT: 8,
+ BROTLI_DECODER_RESULT_ERROR: 0,
+ BROTLI_DECODER_RESULT_SUCCESS: 1,
+ BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
+ BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
+ BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
+ BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
+ BROTLI_DECODER_NO_ERROR: 0,
+ BROTLI_DECODER_SUCCESS: 1,
+ BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
+ BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
+ BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
+ BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
+ BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
+ BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
+ BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
+ BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
+ BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
+ BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
+ BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
+ BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
+ BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
+ BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
+ BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
+ BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
+ BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
+ BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
+ BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
+ BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
+ BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
+ BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
+ BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
+ BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
+ BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
+ BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
+ BROTLI_DECODER_ERROR_UNREACHABLE: -31,
+}, realZlibConstants))
- // == Chinese/GBK ==========================================================
- // http://en.wikipedia.org/wiki/GBK
- // We mostly implement W3C recommendation: https://www.w3.org/TR/encoding/#gbk-encoder
+/***/ }),
+/* 61 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- // Oldest GB2312 (1981, ~7600 chars) is a subset of CP936
- 'gb2312': 'cp936',
- 'gb231280': 'cp936',
- 'gb23121980': 'cp936',
- 'csgb2312': 'cp936',
- 'csiso58gb231280': 'cp936',
- 'euccn': 'cp936',
+"use strict";
- // Microsoft's CP936 is a subset and approximation of GBK.
- 'windows936': 'cp936',
- 'ms936': 'cp936',
- '936': 'cp936',
- 'cp936': {
- type: '_dbcs',
- table: function() { return __webpack_require__(680) },
- },
+var numberIsNan = __webpack_require__(530);
- // GBK (~22000 chars) is an extension of CP936 that added user-mapped chars and some other.
- 'gbk': {
- type: '_dbcs',
- table: function() { return __webpack_require__(680).concat(__webpack_require__(810)) },
- },
- 'xgbk': 'gbk',
- 'isoir58': 'gbk',
+module.exports = function (x) {
+ if (numberIsNan(x)) {
+ return false;
+ }
- // GB18030 is an algorithmic extension of GBK.
- // Main source: https://www.w3.org/TR/encoding/#gbk-encoder
- // http://icu-project.org/docs/papers/gb18030.html
- // http://source.icu-project.org/repos/icu/data/trunk/charset/data/xml/gb-18030-2000.xml
- // http://www.khngai.com/chinese/charmap/tblgbk.php?page=0
- 'gb18030': {
- type: '_dbcs',
- table: function() { return __webpack_require__(680).concat(__webpack_require__(810)) },
- gb18030: function() { return __webpack_require__(829) },
- encodeSkipVals: [0x80],
- encodeAdd: {'€': 0xA2E3},
- },
+ // https://github.com/nodejs/io.js/blob/cff7300a578be1b10001f2d967aaedc88aee6402/lib/readline.js#L1369
- 'chinese': 'gb18030',
+ // code points are derived from:
+ // http://www.unix.org/Public/UNIDATA/EastAsianWidth.txt
+ if (x >= 0x1100 && (
+ x <= 0x115f || // Hangul Jamo
+ 0x2329 === x || // LEFT-POINTING ANGLE BRACKET
+ 0x232a === x || // RIGHT-POINTING ANGLE BRACKET
+ // CJK Radicals Supplement .. Enclosed CJK Letters and Months
+ (0x2e80 <= x && x <= 0x3247 && x !== 0x303f) ||
+ // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A
+ 0x3250 <= x && x <= 0x4dbf ||
+ // CJK Unified Ideographs .. Yi Radicals
+ 0x4e00 <= x && x <= 0xa4c6 ||
+ // Hangul Jamo Extended-A
+ 0xa960 <= x && x <= 0xa97c ||
+ // Hangul Syllables
+ 0xac00 <= x && x <= 0xd7a3 ||
+ // CJK Compatibility Ideographs
+ 0xf900 <= x && x <= 0xfaff ||
+ // Vertical Forms
+ 0xfe10 <= x && x <= 0xfe19 ||
+ // CJK Compatibility Forms .. Small Form Variants
+ 0xfe30 <= x && x <= 0xfe6b ||
+ // Halfwidth and Fullwidth Forms
+ 0xff01 <= x && x <= 0xff60 ||
+ 0xffe0 <= x && x <= 0xffe6 ||
+ // Kana Supplement
+ 0x1b000 <= x && x <= 0x1b001 ||
+ // Enclosed Ideographic Supplement
+ 0x1f200 <= x && x <= 0x1f251 ||
+ // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane
+ 0x20000 <= x && x <= 0x3fffd)) {
+ return true;
+ }
+ return false;
+}
- // == Korean ===============================================================
- // EUC-KR, KS_C_5601 and KS X 1001 are exactly the same.
- 'windows949': 'cp949',
- 'ms949': 'cp949',
- '949': 'cp949',
- 'cp949': {
- type: '_dbcs',
- table: function() { return __webpack_require__(488) },
- },
- 'cseuckr': 'cp949',
- 'csksc56011987': 'cp949',
- 'euckr': 'cp949',
- 'isoir149': 'cp949',
- 'korean': 'cp949',
- 'ksc56011987': 'cp949',
- 'ksc56011989': 'cp949',
- 'ksc5601': 'cp949',
+/***/ }),
+/* 62 */
+/***/ (function(__unusedmodule, exports) {
+exports.parse = exports.decode = decode
- // == Big5/Taiwan/Hong Kong ================================================
- // There are lots of tables for Big5 and cp950. Please see the following links for history:
- // http://moztw.org/docs/big5/ http://www.haible.de/bruno/charsets/conversion-tables/Big5.html
- // Variations, in roughly number of defined chars:
- // * Windows CP 950: Microsoft variant of Big5. Canonical: http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP950.TXT
- // * Windows CP 951: Microsoft variant of Big5-HKSCS-2001. Seems to be never public. http://me.abelcheung.org/articles/research/what-is-cp951/
- // * Big5-2003 (Taiwan standard) almost superset of cp950.
- // * Unicode-at-on (UAO) / Mozilla 1.8. Falling out of use on the Web. Not supported by other browsers.
- // * Big5-HKSCS (-2001, -2004, -2008). Hong Kong standard.
- // many unicode code points moved from PUA to Supplementary plane (U+2XXXX) over the years.
- // Plus, it has 4 combining sequences.
- // Seems that Mozilla refused to support it for 10 yrs. https://bugzilla.mozilla.org/show_bug.cgi?id=162431 https://bugzilla.mozilla.org/show_bug.cgi?id=310299
- // because big5-hkscs is the only encoding to include astral characters in non-algorithmic way.
- // Implementations are not consistent within browsers; sometimes labeled as just big5.
- // MS Internet Explorer switches from big5 to big5-hkscs when a patch applied.
- // Great discussion & recap of what's going on https://bugzilla.mozilla.org/show_bug.cgi?id=912470#c31
- // In the encoder, it might make sense to support encoding old PUA mappings to Big5 bytes seq-s.
- // Official spec: http://www.ogcio.gov.hk/en/business/tech_promotion/ccli/terms/doc/2003cmp_2008.txt
- // http://www.ogcio.gov.hk/tc/business/tech_promotion/ccli/terms/doc/hkscs-2008-big5-iso.txt
- //
- // Current understanding of how to deal with Big5(-HKSCS) is in the Encoding Standard, http://encoding.spec.whatwg.org/#big5-encoder
- // Unicode mapping (http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/OTHER/BIG5.TXT) is said to be wrong.
+exports.stringify = exports.encode = encode
- 'windows950': 'cp950',
- 'ms950': 'cp950',
- '950': 'cp950',
- 'cp950': {
- type: '_dbcs',
- table: function() { return __webpack_require__(801) },
- },
+exports.safe = safe
+exports.unsafe = unsafe
- // Big5 has many variations and is an extension of cp950. We use Encoding Standard's as a consensus.
- 'big5': 'big5hkscs',
- 'big5hkscs': {
- type: '_dbcs',
- table: function() { return __webpack_require__(801).concat(__webpack_require__(958)) },
- encodeSkipVals: [0xa2cc],
- },
+var eol = typeof process !== 'undefined' &&
+ process.platform === 'win32' ? '\r\n' : '\n'
- 'cnbig5': 'big5hkscs',
- 'csbig5': 'big5hkscs',
- 'xxbig5': 'big5hkscs',
-};
+function encode (obj, opt) {
+ var children = []
+ var out = ''
+ if (typeof opt === 'string') {
+ opt = {
+ section: opt,
+ whitespace: false
+ }
+ } else {
+ opt = opt || {}
+ opt.whitespace = opt.whitespace === true
+ }
-/***/ }),
-/* 93 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ var separator = opt.whitespace ? ' = ' : '='
-module.exports = minimatch
-minimatch.Minimatch = Minimatch
+ Object.keys(obj).forEach(function (k, _, __) {
+ var val = obj[k]
+ if (val && Array.isArray(val)) {
+ val.forEach(function (item) {
+ out += safe(k + '[]') + separator + safe(item) + '\n'
+ })
+ } else if (val && typeof val === 'object') {
+ children.push(k)
+ } else {
+ out += safe(k) + separator + safe(val) + eol
+ }
+ })
-var path = { sep: '/' }
-try {
- path = __webpack_require__(622)
-} catch (er) {}
+ if (opt.section && out.length) {
+ out = '[' + safe(opt.section) + ']' + eol + out
+ }
-var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
-var expand = __webpack_require__(306)
+ children.forEach(function (k, _, __) {
+ var nk = dotSplit(k).join('\\.')
+ var section = (opt.section ? opt.section + '.' : '') + nk
+ var child = encode(obj[k], {
+ section: section,
+ whitespace: opt.whitespace
+ })
+ if (out.length && child.length) {
+ out += eol
+ }
+ out += child
+ })
-var plTypes = {
- '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
- '?': { open: '(?:', close: ')?' },
- '+': { open: '(?:', close: ')+' },
- '*': { open: '(?:', close: ')*' },
- '@': { open: '(?:', close: ')' }
+ return out
}
-// any single thing other than /
-// don't need to escape / when using new RegExp()
-var qmark = '[^/]'
-
-// * => any number of characters
-var star = qmark + '*?'
+function dotSplit (str) {
+ return str.replace(/\1/g, '\u0002LITERAL\\1LITERAL\u0002')
+ .replace(/\\\./g, '\u0001')
+ .split(/\./).map(function (part) {
+ return part.replace(/\1/g, '\\.')
+ .replace(/\2LITERAL\\1LITERAL\2/g, '\u0001')
+ })
+}
-// ** when dots are allowed. Anything goes, except .. and .
-// not (^ or / followed by one or two dots followed by $ or /),
-// followed by anything, any number of times.
-var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
+function decode (str) {
+ var out = {}
+ var p = out
+ var section = null
+ // section |key = value
+ var re = /^\[([^\]]*)\]$|^([^=]+)(=(.*))?$/i
+ var lines = str.split(/[\r\n]+/g)
-// not a ^ or / followed by a dot,
-// followed by anything, any number of times.
-var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
+ lines.forEach(function (line, _, __) {
+ if (!line || line.match(/^\s*[;#]/)) return
+ var match = line.match(re)
+ if (!match) return
+ if (match[1] !== undefined) {
+ section = unsafe(match[1])
+ p = out[section] = out[section] || {}
+ return
+ }
+ var key = unsafe(match[2])
+ var value = match[3] ? unsafe(match[4]) : true
+ switch (value) {
+ case 'true':
+ case 'false':
+ case 'null': value = JSON.parse(value)
+ }
-// characters that need to be escaped in RegExp.
-var reSpecials = charSet('().*{}+?[]^$\\!')
+ // Convert keys with '[]' suffix to an array
+ if (key.length > 2 && key.slice(-2) === '[]') {
+ key = key.substring(0, key.length - 2)
+ if (!p[key]) {
+ p[key] = []
+ } else if (!Array.isArray(p[key])) {
+ p[key] = [p[key]]
+ }
+ }
-// "abc" -> { a:true, b:true, c:true }
-function charSet (s) {
- return s.split('').reduce(function (set, c) {
- set[c] = true
- return set
- }, {})
-}
+ // safeguard against resetting a previously defined
+ // array by accidentally forgetting the brackets
+ if (Array.isArray(p[key])) {
+ p[key].push(value)
+ } else {
+ p[key] = value
+ }
+ })
-// normalizes slashes.
-var slashSplit = /\/+/
+ // {a:{y:1},"a.b":{x:2}} --> {a:{y:1,b:{x:2}}}
+ // use a filter to return the keys that have to be deleted.
+ Object.keys(out).filter(function (k, _, __) {
+ if (!out[k] ||
+ typeof out[k] !== 'object' ||
+ Array.isArray(out[k])) {
+ return false
+ }
+ // see if the parent section is also an object.
+ // if so, add it to that, and mark this one for deletion
+ var parts = dotSplit(k)
+ var p = out
+ var l = parts.pop()
+ var nl = l.replace(/\\\./g, '.')
+ parts.forEach(function (part, _, __) {
+ if (!p[part] || typeof p[part] !== 'object') p[part] = {}
+ p = p[part]
+ })
+ if (p === out && nl === l) {
+ return false
+ }
+ p[nl] = out[k]
+ return true
+ }).forEach(function (del, _, __) {
+ delete out[del]
+ })
-minimatch.filter = filter
-function filter (pattern, options) {
- options = options || {}
- return function (p, i, list) {
- return minimatch(p, pattern, options)
- }
+ return out
}
-function ext (a, b) {
- a = a || {}
- b = b || {}
- var t = {}
- Object.keys(b).forEach(function (k) {
- t[k] = b[k]
- })
- Object.keys(a).forEach(function (k) {
- t[k] = a[k]
- })
- return t
+function isQuoted (val) {
+ return (val.charAt(0) === '"' && val.slice(-1) === '"') ||
+ (val.charAt(0) === "'" && val.slice(-1) === "'")
}
-minimatch.defaults = function (def) {
- if (!def || !Object.keys(def).length) return minimatch
-
- var orig = minimatch
-
- var m = function minimatch (p, pattern, options) {
- return orig.minimatch(p, pattern, ext(def, options))
- }
-
- m.Minimatch = function Minimatch (pattern, options) {
- return new orig.Minimatch(pattern, ext(def, options))
- }
-
- return m
+function safe (val) {
+ return (typeof val !== 'string' ||
+ val.match(/[=\r\n]/) ||
+ val.match(/^\[/) ||
+ (val.length > 1 &&
+ isQuoted(val)) ||
+ val !== val.trim())
+ ? JSON.stringify(val)
+ : val.replace(/;/g, '\\;').replace(/#/g, '\\#')
}
-Minimatch.defaults = function (def) {
- if (!def || !Object.keys(def).length) return Minimatch
- return minimatch.defaults(def).Minimatch
-}
-
-function minimatch (p, pattern, options) {
- if (typeof pattern !== 'string') {
- throw new TypeError('glob pattern string required')
+function unsafe (val, doUnesc) {
+ val = (val || '').trim()
+ if (isQuoted(val)) {
+ // remove the single quotes before calling JSON.parse
+ if (val.charAt(0) === "'") {
+ val = val.substr(1, val.length - 2)
+ }
+ try { val = JSON.parse(val) } catch (_) {}
+ } else {
+ // walk the val to find the first not-escaped ; character
+ var esc = false
+ var unesc = ''
+ for (var i = 0, l = val.length; i < l; i++) {
+ var c = val.charAt(i)
+ if (esc) {
+ if ('\\;#'.indexOf(c) !== -1) {
+ unesc += c
+ } else {
+ unesc += '\\' + c
+ }
+ esc = false
+ } else if (';#'.indexOf(c) !== -1) {
+ break
+ } else if (c === '\\') {
+ esc = true
+ } else {
+ unesc += c
+ }
+ }
+ if (esc) {
+ unesc += '\\'
+ }
+ return unesc.trim()
}
+ return val
+}
- if (!options) options = {}
- // shortcut: comments match nothing.
- if (!options.nocomment && pattern.charAt(0) === '#') {
- return false
- }
+/***/ }),
+/* 63 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- // "" only matches ""
- if (pattern.trim() === '') return p === ''
+"use strict";
- return new Minimatch(pattern, options).match(p)
-}
-function Minimatch (pattern, options) {
- if (!(this instanceof Minimatch)) {
- return new Minimatch(pattern, options)
- }
+const assert = __webpack_require__(357)
+const EE = __webpack_require__(614).EventEmitter
+const Parser = __webpack_require__(203)
+const fs = __webpack_require__(747)
+const fsm = __webpack_require__(827)
+const path = __webpack_require__(622)
+const mkdir = __webpack_require__(282)
+const mkdirSync = mkdir.sync
+const wc = __webpack_require__(478)
- if (typeof pattern !== 'string') {
- throw new TypeError('glob pattern string required')
- }
+const ONENTRY = Symbol('onEntry')
+const CHECKFS = Symbol('checkFs')
+const ISREUSABLE = Symbol('isReusable')
+const MAKEFS = Symbol('makeFs')
+const FILE = Symbol('file')
+const DIRECTORY = Symbol('directory')
+const LINK = Symbol('link')
+const SYMLINK = Symbol('symlink')
+const HARDLINK = Symbol('hardlink')
+const UNSUPPORTED = Symbol('unsupported')
+const UNKNOWN = Symbol('unknown')
+const CHECKPATH = Symbol('checkPath')
+const MKDIR = Symbol('mkdir')
+const ONERROR = Symbol('onError')
+const PENDING = Symbol('pending')
+const PEND = Symbol('pend')
+const UNPEND = Symbol('unpend')
+const ENDED = Symbol('ended')
+const MAYBECLOSE = Symbol('maybeClose')
+const SKIP = Symbol('skip')
+const DOCHOWN = Symbol('doChown')
+const UID = Symbol('uid')
+const GID = Symbol('gid')
+const crypto = __webpack_require__(417)
- if (!options) options = {}
- pattern = pattern.trim()
+// Unlinks on Windows are not atomic.
+//
+// This means that if you have a file entry, followed by another
+// file entry with an identical name, and you cannot re-use the file
+// (because it's a hardlink, or because unlink:true is set, or it's
+// Windows, which does not have useful nlink values), then the unlink
+// will be committed to the disk AFTER the new file has been written
+// over the old one, deleting the new file.
+//
+// To work around this, on Windows systems, we rename the file and then
+// delete the renamed file. It's a sloppy kludge, but frankly, I do not
+// know of a better way to do this, given windows' non-atomic unlink
+// semantics.
+//
+// See: https://github.com/npm/node-tar/issues/183
+/* istanbul ignore next */
+const unlinkFile = (path, cb) => {
+ if (process.platform !== 'win32')
+ return fs.unlink(path, cb)
- // windows support: need to use /, not \
- if (path.sep !== '/') {
- pattern = pattern.split(path.sep).join('/')
- }
+ const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
+ fs.rename(path, name, er => {
+ if (er)
+ return cb(er)
+ fs.unlink(name, cb)
+ })
+}
- this.options = options
- this.set = []
- this.pattern = pattern
- this.regexp = null
- this.negate = false
- this.comment = false
- this.empty = false
+/* istanbul ignore next */
+const unlinkFileSync = path => {
+ if (process.platform !== 'win32')
+ return fs.unlinkSync(path)
- // make the set of regexps etc.
- this.make()
+ const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
+ fs.renameSync(path, name)
+ fs.unlinkSync(name)
}
-Minimatch.prototype.debug = function () {}
+// this.gid, entry.gid, this.processUid
+const uint32 = (a, b, c) =>
+ a === a >>> 0 ? a
+ : b === b >>> 0 ? b
+ : c
-Minimatch.prototype.make = make
-function make () {
- // don't do it more than once.
- if (this._made) return
+class Unpack extends Parser {
+ constructor (opt) {
+ if (!opt)
+ opt = {}
- var pattern = this.pattern
- var options = this.options
+ opt.ondone = _ => {
+ this[ENDED] = true
+ this[MAYBECLOSE]()
+ }
- // empty patterns and comments match nothing.
- if (!options.nocomment && pattern.charAt(0) === '#') {
- this.comment = true
- return
- }
- if (!pattern) {
- this.empty = true
- return
- }
+ super(opt)
- // step 1: figure out negation, etc.
- this.parseNegate()
+ this.transform = typeof opt.transform === 'function' ? opt.transform : null
- // step 2: expand braces
- var set = this.globSet = this.braceExpand()
+ this.writable = true
+ this.readable = false
- if (options.debug) this.debug = console.error
+ this[PENDING] = 0
+ this[ENDED] = false
- this.debug(this.pattern, set)
+ this.dirCache = opt.dirCache || new Map()
- // step 3: now we have a set, so turn each one into a series of path-portion
- // matching patterns.
- // These will be regexps, except in the case of "**", which is
- // set to the GLOBSTAR object for globstar behavior,
- // and will not contain any / characters
- set = this.globParts = set.map(function (s) {
- return s.split(slashSplit)
- })
+ if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
+ // need both or neither
+ if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number')
+ throw new TypeError('cannot set owner without number uid and gid')
+ if (opt.preserveOwner)
+ throw new TypeError(
+ 'cannot preserve owner in archive and also set owner explicitly')
+ this.uid = opt.uid
+ this.gid = opt.gid
+ this.setOwner = true
+ } else {
+ this.uid = null
+ this.gid = null
+ this.setOwner = false
+ }
- this.debug(this.pattern, set)
+ // default true for root
+ if (opt.preserveOwner === undefined && typeof opt.uid !== 'number')
+ this.preserveOwner = process.getuid && process.getuid() === 0
+ else
+ this.preserveOwner = !!opt.preserveOwner
- // glob --> regexps
- set = set.map(function (s, si, set) {
- return s.map(this.parse, this)
- }, this)
+ this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
+ process.getuid() : null
+ this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
+ process.getgid() : null
- this.debug(this.pattern, set)
+ // mostly just for testing, but useful in some cases.
+ // Forcibly trigger a chown on every entry, no matter what
+ this.forceChown = opt.forceChown === true
- // filter out everything that didn't compile properly.
- set = set.filter(function (s) {
- return s.indexOf(false) === -1
- })
+ // turn >| in filenames into 0xf000-higher encoded forms
+ this.win32 = !!opt.win32 || process.platform === 'win32'
- this.debug(this.pattern, set)
+ // do not unpack over files that are newer than what's in the archive
+ this.newer = !!opt.newer
- this.set = set
-}
+ // do not unpack over ANY files
+ this.keep = !!opt.keep
-Minimatch.prototype.parseNegate = parseNegate
-function parseNegate () {
- var pattern = this.pattern
- var negate = false
- var options = this.options
- var negateOffset = 0
+ // do not set mtime/atime of extracted entries
+ this.noMtime = !!opt.noMtime
- if (options.nonegate) return
+ // allow .., absolute path entries, and unpacking through symlinks
+ // without this, warn and skip .., relativize absolutes, and error
+ // on symlinks in extraction path
+ this.preservePaths = !!opt.preservePaths
- for (var i = 0, l = pattern.length
- ; i < l && pattern.charAt(i) === '!'
- ; i++) {
- negate = !negate
- negateOffset++
- }
+ // unlink files and links before writing. This breaks existing hard
+ // links, and removes symlink directories rather than erroring
+ this.unlink = !!opt.unlink
- if (negateOffset) this.pattern = pattern.substr(negateOffset)
- this.negate = negate
-}
+ this.cwd = path.resolve(opt.cwd || process.cwd())
+ this.strip = +opt.strip || 0
+ this.processUmask = process.umask()
+ this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
+ // default mode for dirs created as parents
+ this.dmode = opt.dmode || (0o0777 & (~this.umask))
+ this.fmode = opt.fmode || (0o0666 & (~this.umask))
+ this.on('entry', entry => this[ONENTRY](entry))
+ }
-// Brace expansion:
-// a{b,c}d -> abd acd
-// a{b,}c -> abc ac
-// a{0..3}d -> a0d a1d a2d a3d
-// a{b,c{d,e}f}g -> abg acdfg acefg
-// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
-//
-// Invalid sets are not expanded.
-// a{2..}b -> a{2..}b
-// a{b}c -> a{b}c
-minimatch.braceExpand = function (pattern, options) {
- return braceExpand(pattern, options)
-}
+ [MAYBECLOSE] () {
+ if (this[ENDED] && this[PENDING] === 0) {
+ this.emit('prefinish')
+ this.emit('finish')
+ this.emit('end')
+ this.emit('close')
+ }
+ }
-Minimatch.prototype.braceExpand = braceExpand
+ [CHECKPATH] (entry) {
+ if (this.strip) {
+ const parts = entry.path.split(/\/|\\/)
+ if (parts.length < this.strip)
+ return false
+ entry.path = parts.slice(this.strip).join('/')
-function braceExpand (pattern, options) {
- if (!options) {
- if (this instanceof Minimatch) {
- options = this.options
- } else {
- options = {}
+ if (entry.type === 'Link') {
+ const linkparts = entry.linkpath.split(/\/|\\/)
+ if (linkparts.length >= this.strip)
+ entry.linkpath = linkparts.slice(this.strip).join('/')
+ }
}
- }
- pattern = typeof pattern === 'undefined'
- ? this.pattern : pattern
+ if (!this.preservePaths) {
+ const p = entry.path
+ if (p.match(/(^|\/|\\)\.\.(\\|\/|$)/)) {
+ this.warn('path contains \'..\'', p)
+ return false
+ }
- if (typeof pattern === 'undefined') {
- throw new TypeError('undefined pattern')
- }
+ // absolutes on posix are also absolutes on win32
+ // so we only need to test this one to get both
+ if (path.win32.isAbsolute(p)) {
+ const parsed = path.win32.parse(p)
+ this.warn('stripping ' + parsed.root + ' from absolute path', p)
+ entry.path = p.substr(parsed.root.length)
+ }
+ }
- if (options.nobrace ||
- !pattern.match(/\{.*\}/)) {
- // shortcut. no need to expand.
- return [pattern]
- }
+ // only encode : chars that aren't drive letter indicators
+ if (this.win32) {
+ const parsed = path.win32.parse(entry.path)
+ entry.path = parsed.root === '' ? wc.encode(entry.path)
+ : parsed.root + wc.encode(entry.path.substr(parsed.root.length))
+ }
- return expand(pattern)
-}
+ if (path.isAbsolute(entry.path))
+ entry.absolute = entry.path
+ else
+ entry.absolute = path.resolve(this.cwd, entry.path)
-// parse a component of the expanded set.
-// At this point, no pattern may contain "/" in it
-// so we're going to return a 2d array, where each entry is the full
-// pattern, split on '/', and then turned into a regular expression.
-// A regexp is made at the end which joins each array with an
-// escaped /, and another full one which joins each regexp with |.
-//
-// Following the lead of Bash 4.1, note that "**" only has special meaning
-// when it is the *only* thing in a path portion. Otherwise, any series
-// of * is equivalent to a single *. Globstar behavior is enabled by
-// default, and can be disabled by setting options.noglobstar.
-Minimatch.prototype.parse = parse
-var SUBPARSE = {}
-function parse (pattern, isSub) {
- if (pattern.length > 1024 * 64) {
- throw new TypeError('pattern is too long')
+ return true
}
- var options = this.options
+ [ONENTRY] (entry) {
+ if (!this[CHECKPATH](entry))
+ return entry.resume()
- // shortcuts
- if (!options.noglobstar && pattern === '**') return GLOBSTAR
- if (pattern === '') return ''
+ assert.equal(typeof entry.absolute, 'string')
- var re = ''
- var hasMagic = !!options.nocase
- var escaping = false
- // ? => one single character
- var patternListStack = []
- var negativeLists = []
- var stateChar
- var inClass = false
- var reClassStart = -1
- var classStart = -1
- // . and .. never match anything that doesn't start with .,
- // even when options.dot is set.
- var patternStart = pattern.charAt(0) === '.' ? '' // anything
- // not (start or / followed by . or .. followed by / or end)
- : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
- : '(?!\\.)'
- var self = this
+ switch (entry.type) {
+ case 'Directory':
+ case 'GNUDumpDir':
+ if (entry.mode)
+ entry.mode = entry.mode | 0o700
- function clearStateChar () {
- if (stateChar) {
- // we had some state-tracking character
- // that wasn't consumed by this pass.
- switch (stateChar) {
- case '*':
- re += star
- hasMagic = true
- break
- case '?':
- re += qmark
- hasMagic = true
- break
- default:
- re += '\\' + stateChar
- break
- }
- self.debug('clearStateChar %j %j', stateChar, re)
- stateChar = false
+ case 'File':
+ case 'OldFile':
+ case 'ContiguousFile':
+ case 'Link':
+ case 'SymbolicLink':
+ return this[CHECKFS](entry)
+
+ case 'CharacterDevice':
+ case 'BlockDevice':
+ case 'FIFO':
+ return this[UNSUPPORTED](entry)
}
}
- for (var i = 0, len = pattern.length, c
- ; (i < len) && (c = pattern.charAt(i))
- ; i++) {
- this.debug('%s\t%s %s %j', pattern, i, re, c)
-
- // skip over any that are escaped.
- if (escaping && reSpecials[c]) {
- re += '\\' + c
- escaping = false
- continue
+ [ONERROR] (er, entry) {
+ // Cwd has to exist, or else nothing works. That's serious.
+ // Other errors are warnings, which raise the error in strict
+ // mode, but otherwise continue on.
+ if (er.name === 'CwdError')
+ this.emit('error', er)
+ else {
+ this.warn(er.message, er)
+ this[UNPEND]()
+ entry.resume()
}
+ }
- switch (c) {
- case '/':
- // completely not allowed, even escaped.
- // Should already be path-split by now.
- return false
+ [MKDIR] (dir, mode, cb) {
+ mkdir(dir, {
+ uid: this.uid,
+ gid: this.gid,
+ processUid: this.processUid,
+ processGid: this.processGid,
+ umask: this.processUmask,
+ preserve: this.preservePaths,
+ unlink: this.unlink,
+ cache: this.dirCache,
+ cwd: this.cwd,
+ mode: mode
+ }, cb)
+ }
- case '\\':
- clearStateChar()
- escaping = true
- continue
+ [DOCHOWN] (entry) {
+ // in preserve owner mode, chown if the entry doesn't match process
+ // in set owner mode, chown if setting doesn't match process
+ return this.forceChown ||
+ this.preserveOwner &&
+ ( typeof entry.uid === 'number' && entry.uid !== this.processUid ||
+ typeof entry.gid === 'number' && entry.gid !== this.processGid )
+ ||
+ ( typeof this.uid === 'number' && this.uid !== this.processUid ||
+ typeof this.gid === 'number' && this.gid !== this.processGid )
+ }
- // the various stateChar values
- // for the "extglob" stuff.
- case '?':
- case '*':
- case '+':
- case '@':
- case '!':
- this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)
+ [UID] (entry) {
+ return uint32(this.uid, entry.uid, this.processUid)
+ }
- // all of those are literals inside a class, except that
- // the glob [!a] means [^a] in regexp
- if (inClass) {
- this.debug(' in class')
- if (c === '!' && i === classStart + 1) c = '^'
- re += c
- continue
- }
+ [GID] (entry) {
+ return uint32(this.gid, entry.gid, this.processGid)
+ }
- // if we already have a stateChar, then it means
- // that there was something like ** or +? in there.
- // Handle the stateChar, then proceed with this one.
- self.debug('call clearStateChar %j', stateChar)
- clearStateChar()
- stateChar = c
- // if extglob is disabled, then +(asdf|foo) isn't a thing.
- // just clear the statechar *now*, rather than even diving into
- // the patternList stuff.
- if (options.noext) clearStateChar()
- continue
-
- case '(':
- if (inClass) {
- re += '('
- continue
- }
-
- if (!stateChar) {
- re += '\\('
- continue
- }
-
- patternListStack.push({
- type: stateChar,
- start: i - 1,
- reStart: re.length,
- open: plTypes[stateChar].open,
- close: plTypes[stateChar].close
- })
- // negation is (?:(?!js)[^/]*)
- re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
- this.debug('plType %j %j', stateChar, re)
- stateChar = false
- continue
-
- case ')':
- if (inClass || !patternListStack.length) {
- re += '\\)'
- continue
- }
-
- clearStateChar()
- hasMagic = true
- var pl = patternListStack.pop()
- // negation is (?:(?!js)[^/]*)
- // The others are (?:)
- re += pl.close
- if (pl.type === '!') {
- negativeLists.push(pl)
- }
- pl.reEnd = re.length
- continue
-
- case '|':
- if (inClass || !patternListStack.length || escaping) {
- re += '\\|'
- escaping = false
- continue
- }
+ [FILE] (entry) {
+ const mode = entry.mode & 0o7777 || this.fmode
+ const stream = new fsm.WriteStream(entry.absolute, {
+ mode: mode,
+ autoClose: false
+ })
+ stream.on('error', er => this[ONERROR](er, entry))
- clearStateChar()
- re += '|'
- continue
+ let actions = 1
+ const done = er => {
+ if (er)
+ return this[ONERROR](er, entry)
- // these are mostly the same in regexp and glob
- case '[':
- // swallow any state-tracking char before the [
- clearStateChar()
+ if (--actions === 0)
+ fs.close(stream.fd, _ => this[UNPEND]())
+ }
- if (inClass) {
- re += '\\' + c
- continue
- }
+ stream.on('finish', _ => {
+ // if futimes fails, try utimes
+ // if utimes fails, fail with the original error
+ // same for fchown/chown
+ const abs = entry.absolute
+ const fd = stream.fd
- inClass = true
- classStart = i
- reClassStart = re.length
- re += c
- continue
+ if (entry.mtime && !this.noMtime) {
+ actions++
+ const atime = entry.atime || new Date()
+ const mtime = entry.mtime
+ fs.futimes(fd, atime, mtime, er =>
+ er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
+ : done())
+ }
- case ']':
- // a right bracket shall lose its special
- // meaning and represent itself in
- // a bracket expression if it occurs
- // first in the list. -- POSIX.2 2.8.3.2
- if (i === classStart + 1 || !inClass) {
- re += '\\' + c
- escaping = false
- continue
- }
+ if (this[DOCHOWN](entry)) {
+ actions++
+ const uid = this[UID](entry)
+ const gid = this[GID](entry)
+ fs.fchown(fd, uid, gid, er =>
+ er ? fs.chown(abs, uid, gid, er2 => done(er2 && er))
+ : done())
+ }
- // handle the case where we left a class open.
- // "[z-a]" is valid, equivalent to "\[z-a\]"
- if (inClass) {
- // split where the last [ was, make sure we don't have
- // an invalid re. if so, re-walk the contents of the
- // would-be class to re-translate any characters that
- // were passed through as-is
- // TODO: It would probably be faster to determine this
- // without a try/catch and a new RegExp, but it's tricky
- // to do safely. For now, this is safe and works.
- var cs = pattern.substring(classStart + 1, i)
- try {
- RegExp('[' + cs + ']')
- } catch (er) {
- // not a valid class!
- var sp = this.parse(cs, SUBPARSE)
- re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
- hasMagic = hasMagic || sp[1]
- inClass = false
- continue
- }
- }
+ done()
+ })
- // finish up the class.
- hasMagic = true
- inClass = false
- re += c
- continue
+ const tx = this.transform ? this.transform(entry) || entry : entry
+ if (tx !== entry) {
+ tx.on('error', er => this[ONERROR](er, entry))
+ entry.pipe(tx)
+ }
+ tx.pipe(stream)
+ }
- default:
- // swallow any state char that wasn't consumed
- clearStateChar()
+ [DIRECTORY] (entry) {
+ const mode = entry.mode & 0o7777 || this.dmode
+ this[MKDIR](entry.absolute, mode, er => {
+ if (er)
+ return this[ONERROR](er, entry)
- if (escaping) {
- // no need
- escaping = false
- } else if (reSpecials[c]
- && !(c === '^' && inClass)) {
- re += '\\'
+ let actions = 1
+ const done = _ => {
+ if (--actions === 0) {
+ this[UNPEND]()
+ entry.resume()
}
+ }
- re += c
-
- } // switch
- } // for
-
- // handle the case where we left a class open.
- // "[abc" is valid, equivalent to "\[abc"
- if (inClass) {
- // split where the last [ was, and escape it
- // this is a huge pita. We now have to re-walk
- // the contents of the would-be class to re-translate
- // any characters that were passed through as-is
- cs = pattern.substr(classStart + 1)
- sp = this.parse(cs, SUBPARSE)
- re = re.substr(0, reClassStart) + '\\[' + sp[0]
- hasMagic = hasMagic || sp[1]
- }
+ if (entry.mtime && !this.noMtime) {
+ actions++
+ fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done)
+ }
- // handle the case where we had a +( thing at the *end*
- // of the pattern.
- // each pattern list stack adds 3 chars, and we need to go through
- // and escape any | chars that were passed through as-is for the regexp.
- // Go through and escape them, taking care not to double-escape any
- // | chars that were already escaped.
- for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
- var tail = re.slice(pl.reStart + pl.open.length)
- this.debug('setting tail', re, pl)
- // maybe some even number of \, then maybe 1 \, followed by a |
- tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) {
- if (!$2) {
- // the | isn't already escaped, so escape it.
- $2 = '\\'
+ if (this[DOCHOWN](entry)) {
+ actions++
+ fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done)
}
- // need to escape all those slashes *again*, without escaping the
- // one that we need for escaping the | character. As it works out,
- // escaping an even number of slashes can be done by simply repeating
- // it exactly after itself. That's why this trick works.
- //
- // I am sorry that you have to see this.
- return $1 + $1 + $2 + '|'
+ done()
})
-
- this.debug('tail=%j\n %s', tail, tail, pl, re)
- var t = pl.type === '*' ? star
- : pl.type === '?' ? qmark
- : '\\' + pl.type
-
- hasMagic = true
- re = re.slice(0, pl.reStart) + t + '\\(' + tail
}
- // handle trailing things that only matter at the very end.
- clearStateChar()
- if (escaping) {
- // trailing \\
- re += '\\\\'
+ [UNSUPPORTED] (entry) {
+ this.warn('unsupported entry type: ' + entry.type, entry)
+ entry.resume()
}
- // only need to apply the nodot start if the re starts with
- // something that could conceivably capture a dot
- var addPatternStart = false
- switch (re.charAt(0)) {
- case '.':
- case '[':
- case '(': addPatternStart = true
+ [SYMLINK] (entry) {
+ this[LINK](entry, entry.linkpath, 'symlink')
}
- // Hack to work around lack of negative lookbehind in JS
- // A pattern like: *.!(x).!(y|z) needs to ensure that a name
- // like 'a.xyz.yz' doesn't match. So, the first negative
- // lookahead, has to look ALL the way ahead, to the end of
- // the pattern.
- for (var n = negativeLists.length - 1; n > -1; n--) {
- var nl = negativeLists[n]
-
- var nlBefore = re.slice(0, nl.reStart)
- var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
- var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)
- var nlAfter = re.slice(nl.reEnd)
-
- nlLast += nlAfter
-
- // Handle nested stuff like *(*.js|!(*.json)), where open parens
- // mean that we should *not* include the ) in the bit that is considered
- // "after" the negated section.
- var openParensBefore = nlBefore.split('(').length - 1
- var cleanAfter = nlAfter
- for (i = 0; i < openParensBefore; i++) {
- cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
- }
- nlAfter = cleanAfter
-
- var dollar = ''
- if (nlAfter === '' && isSub !== SUBPARSE) {
- dollar = '$'
- }
- var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
- re = newRe
+ [HARDLINK] (entry) {
+ this[LINK](entry, path.resolve(this.cwd, entry.linkpath), 'link')
}
- // if the re is not "" at this point, then we need to make sure
- // it doesn't match against an empty path part.
- // Otherwise a/* will match a/, which it should not.
- if (re !== '' && hasMagic) {
- re = '(?=.)' + re
+ [PEND] () {
+ this[PENDING]++
}
- if (addPatternStart) {
- re = patternStart + re
+ [UNPEND] () {
+ this[PENDING]--
+ this[MAYBECLOSE]()
}
- // parsing just a piece of a larger pattern.
- if (isSub === SUBPARSE) {
- return [re, hasMagic]
+ [SKIP] (entry) {
+ this[UNPEND]()
+ entry.resume()
}
- // skip the regexp for non-magical patterns
- // unescape anything in it, though, so that it'll be
- // an exact match against a file etc.
- if (!hasMagic) {
- return globUnescape(pattern)
+ // Check if we can reuse an existing filesystem entry safely and
+ // overwrite it, rather than unlinking and recreating
+ // Windows doesn't report a useful nlink, so we just never reuse entries
+ [ISREUSABLE] (entry, st) {
+ return entry.type === 'File' &&
+ !this.unlink &&
+ st.isFile() &&
+ st.nlink <= 1 &&
+ process.platform !== 'win32'
}
- var flags = options.nocase ? 'i' : ''
- try {
- var regExp = new RegExp('^' + re + '$', flags)
- } catch (er) {
- // If it was an invalid regular expression, then it can't match
- // anything. This trick looks for a character after the end of
- // the string, which is of course impossible, except in multi-line
- // mode, but it's not a /m regex.
- return new RegExp('$.')
+ // check if a thing is there, and if so, try to clobber it
+ [CHECKFS] (entry) {
+ this[PEND]()
+ this[MKDIR](path.dirname(entry.absolute), this.dmode, er => {
+ if (er)
+ return this[ONERROR](er, entry)
+ fs.lstat(entry.absolute, (er, st) => {
+ if (st && (this.keep || this.newer && st.mtime > entry.mtime))
+ this[SKIP](entry)
+ else if (er || this[ISREUSABLE](entry, st))
+ this[MAKEFS](null, entry)
+ else if (st.isDirectory()) {
+ if (entry.type === 'Directory') {
+ if (!entry.mode || (st.mode & 0o7777) === entry.mode)
+ this[MAKEFS](null, entry)
+ else
+ fs.chmod(entry.absolute, entry.mode, er => this[MAKEFS](er, entry))
+ } else
+ fs.rmdir(entry.absolute, er => this[MAKEFS](er, entry))
+ } else
+ unlinkFile(entry.absolute, er => this[MAKEFS](er, entry))
+ })
+ })
}
- regExp._glob = pattern
- regExp._src = re
-
- return regExp
-}
+ [MAKEFS] (er, entry) {
+ if (er)
+ return this[ONERROR](er, entry)
-minimatch.makeRe = function (pattern, options) {
- return new Minimatch(pattern, options || {}).makeRe()
-}
+ switch (entry.type) {
+ case 'File':
+ case 'OldFile':
+ case 'ContiguousFile':
+ return this[FILE](entry)
-Minimatch.prototype.makeRe = makeRe
-function makeRe () {
- if (this.regexp || this.regexp === false) return this.regexp
+ case 'Link':
+ return this[HARDLINK](entry)
- // at this point, this.set is a 2d array of partial
- // pattern strings, or "**".
- //
- // It's better to use .match(). This function shouldn't
- // be used, really, but it's pretty convenient sometimes,
- // when you just want to work with a regex.
- var set = this.set
+ case 'SymbolicLink':
+ return this[SYMLINK](entry)
- if (!set.length) {
- this.regexp = false
- return this.regexp
+ case 'Directory':
+ case 'GNUDumpDir':
+ return this[DIRECTORY](entry)
+ }
}
- var options = this.options
-
- var twoStar = options.noglobstar ? star
- : options.dot ? twoStarDot
- : twoStarNoDot
- var flags = options.nocase ? 'i' : ''
-
- var re = set.map(function (pattern) {
- return pattern.map(function (p) {
- return (p === GLOBSTAR) ? twoStar
- : (typeof p === 'string') ? regExpEscape(p)
- : p._src
- }).join('\\\/')
- }).join('|')
-
- // must match entire pattern
- // ending in a * or ** will make it less strict.
- re = '^(?:' + re + ')$'
- // can match anything, as long as it's not this.
- if (this.negate) re = '^(?!' + re + ').*$'
-
- try {
- this.regexp = new RegExp(re, flags)
- } catch (ex) {
- this.regexp = false
+ [LINK] (entry, linkpath, link) {
+ // XXX: get the type ('file' or 'dir') for windows
+ fs[link](linkpath, entry.absolute, er => {
+ if (er)
+ return this[ONERROR](er, entry)
+ this[UNPEND]()
+ entry.resume()
+ })
}
- return this.regexp
}
-minimatch.match = function (list, pattern, options) {
- options = options || {}
- var mm = new Minimatch(pattern, options)
- list = list.filter(function (f) {
- return mm.match(f)
- })
- if (mm.options.nonull && !list.length) {
- list.push(pattern)
+class UnpackSync extends Unpack {
+ constructor (opt) {
+ super(opt)
}
- return list
-}
-
-Minimatch.prototype.match = match
-function match (f, partial) {
- this.debug('match', f, this.pattern)
- // short-circuit in the case of busted things.
- // comments, etc.
- if (this.comment) return false
- if (this.empty) return f === ''
-
- if (f === '/' && partial) return true
-
- var options = this.options
- // windows: need to use /, not \
- if (path.sep !== '/') {
- f = f.split(path.sep).join('/')
+ [CHECKFS] (entry) {
+ const er = this[MKDIR](path.dirname(entry.absolute), this.dmode)
+ if (er)
+ return this[ONERROR](er, entry)
+ try {
+ const st = fs.lstatSync(entry.absolute)
+ if (this.keep || this.newer && st.mtime > entry.mtime)
+ return this[SKIP](entry)
+ else if (this[ISREUSABLE](entry, st))
+ return this[MAKEFS](null, entry)
+ else {
+ try {
+ if (st.isDirectory()) {
+ if (entry.type === 'Directory') {
+ if (entry.mode && (st.mode & 0o7777) !== entry.mode)
+ fs.chmodSync(entry.absolute, entry.mode)
+ } else
+ fs.rmdirSync(entry.absolute)
+ } else
+ unlinkFileSync(entry.absolute)
+ return this[MAKEFS](null, entry)
+ } catch (er) {
+ return this[ONERROR](er, entry)
+ }
+ }
+ } catch (er) {
+ return this[MAKEFS](null, entry)
+ }
}
- // treat the test path as a set of pathparts.
- f = f.split(slashSplit)
- this.debug(this.pattern, 'split', f)
-
- // just ONE of the pattern sets in this.set needs to match
- // in order for it to be valid. If negating, then just one
- // match means that we have failed.
- // Either way, return on the first hit.
-
- var set = this.set
- this.debug(this.pattern, 'set', set)
+ [FILE] (entry) {
+ const mode = entry.mode & 0o7777 || this.fmode
- // Find the basename of the path by looking for the last non-empty segment
- var filename
- var i
- for (i = f.length - 1; i >= 0; i--) {
- filename = f[i]
- if (filename) break
- }
+ const oner = er => {
+ try { fs.closeSync(fd) } catch (_) {}
+ if (er)
+ this[ONERROR](er, entry)
+ }
- for (i = 0; i < set.length; i++) {
- var pattern = set[i]
- var file = f
- if (options.matchBase && pattern.length === 1) {
- file = [filename]
+ let stream
+ let fd
+ try {
+ fd = fs.openSync(entry.absolute, 'w', mode)
+ } catch (er) {
+ return oner(er)
}
- var hit = this.matchOne(file, pattern, partial)
- if (hit) {
- if (options.flipNegate) return true
- return !this.negate
+ const tx = this.transform ? this.transform(entry) || entry : entry
+ if (tx !== entry) {
+ tx.on('error', er => this[ONERROR](er, entry))
+ entry.pipe(tx)
}
- }
- // didn't get any hits. this is success if it's a negative
- // pattern, failure otherwise.
- if (options.flipNegate) return false
- return this.negate
-}
-
-// set partial to true to test if, for example,
-// "/a/b" matches the start of "/*/b/*/d"
-// Partial means, if you run out of file before you run
-// out of pattern, then that's fine, as long as all
-// the parts match.
-Minimatch.prototype.matchOne = function (file, pattern, partial) {
- var options = this.options
-
- this.debug('matchOne',
- { 'this': this, file: file, pattern: pattern })
-
- this.debug('matchOne', file.length, pattern.length)
-
- for (var fi = 0,
- pi = 0,
- fl = file.length,
- pl = pattern.length
- ; (fi < fl) && (pi < pl)
- ; fi++, pi++) {
- this.debug('matchOne loop')
- var p = pattern[pi]
- var f = file[fi]
-
- this.debug(pattern, p, f)
-
- // should be impossible.
- // some invalid regexp stuff in the set.
- if (p === false) return false
-
- if (p === GLOBSTAR) {
- this.debug('GLOBSTAR', [pattern, p, f])
+ tx.on('data', chunk => {
+ try {
+ fs.writeSync(fd, chunk, 0, chunk.length)
+ } catch (er) {
+ oner(er)
+ }
+ })
- // "**"
- // a/**/b/**/c would match the following:
- // a/b/x/y/z/c
- // a/x/y/z/b/c
- // a/b/x/b/x/c
- // a/b/c
- // To do this, take the rest of the pattern after
- // the **, and see if it would match the file remainder.
- // If so, return success.
- // If not, the ** "swallows" a segment, and try again.
- // This is recursively awful.
- //
- // a/**/b/**/c matching a/b/x/y/z/c
- // - a matches a
- // - doublestar
- // - matchOne(b/x/y/z/c, b/**/c)
- // - b matches b
- // - doublestar
- // - matchOne(x/y/z/c, c) -> no
- // - matchOne(y/z/c, c) -> no
- // - matchOne(z/c, c) -> no
- // - matchOne(c, c) yes, hit
- var fr = fi
- var pr = pi + 1
- if (pr === pl) {
- this.debug('** at the end')
- // a ** at the end will just swallow the rest.
- // We have found a match.
- // however, it will not swallow /.x, unless
- // options.dot is set.
- // . and .. are *never* matched by **, for explosively
- // exponential reasons.
- for (; fi < fl; fi++) {
- if (file[fi] === '.' || file[fi] === '..' ||
- (!options.dot && file[fi].charAt(0) === '.')) return false
+ tx.on('end', _ => {
+ let er = null
+ // try both, falling futimes back to utimes
+ // if either fails, handle the first error
+ if (entry.mtime && !this.noMtime) {
+ const atime = entry.atime || new Date()
+ const mtime = entry.mtime
+ try {
+ fs.futimesSync(fd, atime, mtime)
+ } catch (futimeser) {
+ try {
+ fs.utimesSync(entry.absolute, atime, mtime)
+ } catch (utimeser) {
+ er = futimeser
+ }
}
- return true
}
- // ok, let's see if we can swallow whatever we can.
- while (fr < fl) {
- var swallowee = file[fr]
-
- this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)
+ if (this[DOCHOWN](entry)) {
+ const uid = this[UID](entry)
+ const gid = this[GID](entry)
- // XXX remove this slice. Just pass the start index.
- if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
- this.debug('globstar found match!', fr, fl, swallowee)
- // found a match.
- return true
- } else {
- // can't swallow "." or ".." ever.
- // can only swallow ".foo" when explicitly asked.
- if (swallowee === '.' || swallowee === '..' ||
- (!options.dot && swallowee.charAt(0) === '.')) {
- this.debug('dot detected!', file, fr, pattern, pr)
- break
+ try {
+ fs.fchownSync(fd, uid, gid)
+ } catch (fchowner) {
+ try {
+ fs.chownSync(entry.absolute, uid, gid)
+ } catch (chowner) {
+ er = er || fchowner
}
-
- // ** swallows a segment, and continue.
- this.debug('globstar swallow a segment, and continue')
- fr++
}
}
- // no match was found.
- // However, in partial mode, we can't say this is necessarily over.
- // If there's more *pattern* left, then
- if (partial) {
- // ran out of file
- this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
- if (fr === fl) return true
- }
- return false
- }
+ oner(er)
+ })
+ }
- // something other than **
- // non-magic patterns just have to match exactly
- // patterns with magic have been turned into regexps.
- var hit
- if (typeof p === 'string') {
- if (options.nocase) {
- hit = f.toLowerCase() === p.toLowerCase()
- } else {
- hit = f === p
- }
- this.debug('string match', p, f, hit)
- } else {
- hit = f.match(p)
- this.debug('pattern match', p, f, hit)
+ [DIRECTORY] (entry) {
+ const mode = entry.mode & 0o7777 || this.dmode
+ const er = this[MKDIR](entry.absolute, mode)
+ if (er)
+ return this[ONERROR](er, entry)
+ if (entry.mtime && !this.noMtime) {
+ try {
+ fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime)
+ } catch (er) {}
}
-
- if (!hit) return false
+ if (this[DOCHOWN](entry)) {
+ try {
+ fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))
+ } catch (er) {}
+ }
+ entry.resume()
}
- // Note: ending in / means that we'll get a final ""
- // at the end of the pattern. This can only match a
- // corresponding "" at the end of the file.
- // If the file ends in /, then it can only match a
- // a pattern that ends in /, unless the pattern just
- // doesn't have any more for it. But, a/b/ should *not*
- // match "a/b/*", even though "" matches against the
- // [^/]*? pattern, except in partial mode, where it might
- // simply not be reached yet.
- // However, a/b/ should still satisfy a/*
-
- // now either we fell off the end of the pattern, or we're done.
- if (fi === fl && pi === pl) {
- // ran out of pattern and filename at the same time.
- // an exact hit!
- return true
- } else if (fi === fl) {
- // ran out of file, but still had pattern left.
- // this is ok if we're doing the match as part of
- // a glob fs traversal.
- return partial
- } else if (pi === pl) {
- // ran out of pattern, still have file left.
- // this is only acceptable if we're on the very last
- // empty segment of a file with a trailing slash.
- // a/* should match a/b/
- var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
- return emptyFileEnd
+ [MKDIR] (dir, mode) {
+ try {
+ return mkdir.sync(dir, {
+ uid: this.uid,
+ gid: this.gid,
+ processUid: this.processUid,
+ processGid: this.processGid,
+ umask: this.processUmask,
+ preserve: this.preservePaths,
+ unlink: this.unlink,
+ cache: this.dirCache,
+ cwd: this.cwd,
+ mode: mode
+ })
+ } catch (er) {
+ return er
+ }
}
- // should be unreachable.
- throw new Error('wtf?')
-}
-
-// replace stuff like \* with *
-function globUnescape (s) {
- return s.replace(/\\(.)/g, '$1')
+ [LINK] (entry, linkpath, link) {
+ try {
+ fs[link + 'Sync'](linkpath, entry.absolute)
+ entry.resume()
+ } catch (er) {
+ return this[ONERROR](er, entry)
+ }
+ }
}
-function regExpEscape (s) {
- return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
-}
+Unpack.Sync = UnpackSync
+module.exports = Unpack
/***/ }),
-/* 94 */
+/* 64 */
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-var path = __webpack_require__(622)
+const path = __webpack_require__(622);
+const os = __webpack_require__(87);
-var uniqueSlug = __webpack_require__(336)
+const homedir = os.homedir();
+const tmpdir = os.tmpdir();
+const {env} = process;
-module.exports = function (filepath, prefix, uniq) {
- return path.join(filepath, (prefix ? prefix + '-' : '') + uniqueSlug(uniq))
-}
+const macos = name => {
+ const library = path.join(homedir, 'Library');
+ return {
+ data: path.join(library, 'Application Support', name),
+ config: path.join(library, 'Preferences', name),
+ cache: path.join(library, 'Caches', name),
+ log: path.join(library, 'Logs', name),
+ temp: path.join(tmpdir, name)
+ };
+};
-/***/ }),
-/* 95 */
-/***/ (function(__unusedmodule, exports) {
+const windows = name => {
+ const appData = env.APPDATA || path.join(homedir, 'AppData', 'Roaming');
+ const localAppData = env.LOCALAPPDATA || path.join(homedir, 'AppData', 'Local');
-"use strict";
+ return {
+ // Data/config/cache/log are invented by me as Windows isn't opinionated about this
+ data: path.join(localAppData, name, 'Data'),
+ config: path.join(appData, name, 'Config'),
+ cache: path.join(localAppData, name, 'Cache'),
+ log: path.join(localAppData, name, 'Log'),
+ temp: path.join(tmpdir, name)
+ };
+};
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-//# sourceMappingURL=link.js.map
+// https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
+const linux = name => {
+ const username = path.basename(homedir);
-/***/ }),
-/* 96 */,
-/* 97 */
-/***/ (function(module) {
+ return {
+ data: path.join(env.XDG_DATA_HOME || path.join(homedir, '.local', 'share'), name),
+ config: path.join(env.XDG_CONFIG_HOME || path.join(homedir, '.config'), name),
+ cache: path.join(env.XDG_CACHE_HOME || path.join(homedir, '.cache'), name),
+ // https://wiki.debian.org/XDGBaseDirectorySpecification#state
+ log: path.join(env.XDG_STATE_HOME || path.join(homedir, '.local', 'state'), name),
+ temp: path.join(tmpdir, username, name)
+ };
+};
-"use strict";
+const envPaths = (name, options) => {
+ if (typeof name !== 'string') {
+ throw new TypeError(`Expected string, got ${typeof name}`);
+ }
-/* eslint-disable yoda */
-module.exports = x => {
- if (Number.isNaN(x)) {
- return false;
+ options = Object.assign({suffix: 'nodejs'}, options);
+
+ if (options.suffix) {
+ // Add suffix to prevent possible conflict with native apps
+ name += `-${options.suffix}`;
}
- // code points are derived from:
- // http://www.unix.org/Public/UNIDATA/EastAsianWidth.txt
- if (
- x >= 0x1100 && (
- x <= 0x115f || // Hangul Jamo
- x === 0x2329 || // LEFT-POINTING ANGLE BRACKET
- x === 0x232a || // RIGHT-POINTING ANGLE BRACKET
- // CJK Radicals Supplement .. Enclosed CJK Letters and Months
- (0x2e80 <= x && x <= 0x3247 && x !== 0x303f) ||
- // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A
- (0x3250 <= x && x <= 0x4dbf) ||
- // CJK Unified Ideographs .. Yi Radicals
- (0x4e00 <= x && x <= 0xa4c6) ||
- // Hangul Jamo Extended-A
- (0xa960 <= x && x <= 0xa97c) ||
- // Hangul Syllables
- (0xac00 <= x && x <= 0xd7a3) ||
- // CJK Compatibility Ideographs
- (0xf900 <= x && x <= 0xfaff) ||
- // Vertical Forms
- (0xfe10 <= x && x <= 0xfe19) ||
- // CJK Compatibility Forms .. Small Form Variants
- (0xfe30 <= x && x <= 0xfe6b) ||
- // Halfwidth and Fullwidth Forms
- (0xff01 <= x && x <= 0xff60) ||
- (0xffe0 <= x && x <= 0xffe6) ||
- // Kana Supplement
- (0x1b000 <= x && x <= 0x1b001) ||
- // Enclosed Ideographic Supplement
- (0x1f200 <= x && x <= 0x1f251) ||
- // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane
- (0x20000 <= x && x <= 0x3fffd)
- )
- ) {
- return true;
+ if (process.platform === 'darwin') {
+ return macos(name);
}
- return false;
+ if (process.platform === 'win32') {
+ return windows(name);
+ }
+
+ return linux(name);
};
+module.exports = envPaths;
+// TODO: Remove this for the next major release
+module.exports.default = envPaths;
+
/***/ }),
-/* 98 */
+/* 65 */
/***/ (function(module) {
+// Generated by CoffeeScript 1.12.7
+(function() {
+ module.exports = {
+ Disconnected: 1,
+ Preceding: 2,
+ Following: 4,
+ Contains: 8,
+ ContainedBy: 16,
+ ImplementationSpecific: 32
+ };
+
+}).call(this);
+
+
+/***/ }),
+/* 66 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
"use strict";
+var stripAnsi = __webpack_require__(774);
+var codePointAt = __webpack_require__(588);
+var isFullwidthCodePoint = __webpack_require__(61);
-module.exports = stringifyPackage
+// https://github.com/nodejs/io.js/blob/cff7300a578be1b10001f2d967aaedc88aee6402/lib/readline.js#L1345
+module.exports = function (str) {
+ if (typeof str !== 'string' || str.length === 0) {
+ return 0;
+ }
-const DEFAULT_INDENT = 2
-const CRLF = '\r\n'
-const LF = '\n'
+ var width = 0;
-function stringifyPackage (data, indent, newline) {
- indent = indent || (indent === 0 ? 0 : DEFAULT_INDENT)
- const json = JSON.stringify(data, null, indent)
+ str = stripAnsi(str);
- if (newline === CRLF) {
- return json.replace(/\n/g, CRLF) + CRLF
- }
+ for (var i = 0; i < str.length; i++) {
+ var code = codePointAt(str, i);
- return json + LF
-}
+ // ignore control characters
+ if (code <= 0x1f || (code >= 0x7f && code <= 0x9f)) {
+ continue;
+ }
+
+ // surrogates
+ if (code >= 0x10000) {
+ i++;
+ }
+
+ if (isFullwidthCodePoint(code)) {
+ width += 2;
+ } else {
+ width++;
+ }
+ }
+
+ return width;
+};
/***/ }),
-/* 99 */,
-/* 100 */
+/* 67 */,
+/* 68 */
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-// try to find the most reasonable prefix to use
-module.exports = findPrefix
+/**
+ * headers.js
+ *
+ * Headers class offers convenient helpers
+ */
-const fs = __webpack_require__(747)
-const path = __webpack_require__(622)
+const common = __webpack_require__(477)
+const checkInvalidHeaderChar = common.checkInvalidHeaderChar
+const checkIsHttpToken = common.checkIsHttpToken
-function findPrefix (dir) {
- return new Promise((resolve, reject) => {
- dir = path.resolve(dir)
+function sanitizeName (name) {
+ name += ''
+ if (!checkIsHttpToken(name)) {
+ throw new TypeError(`${name} is not a legal HTTP header name`)
+ }
+ return name.toLowerCase()
+}
- // this is a weird special case where an infinite recurse of
- // node_modules folders resolves to the level that contains the
- // very first node_modules folder
- let walkedUp = false
- while (path.basename(dir) === 'node_modules') {
- dir = path.dirname(dir)
- walkedUp = true
- }
- if (walkedUp) {
- resolve(dir)
- } else {
- resolve(findPrefix_(dir))
- }
- })
+function sanitizeValue (value) {
+ value += ''
+ if (checkInvalidHeaderChar(value)) {
+ throw new TypeError(`${value} is not a legal HTTP header value`)
+ }
+ return value
}
-function findPrefix_ (dir, original) {
- if (!original) original = dir
+const MAP = Symbol('map')
+class Headers {
+ /**
+ * Headers class
+ *
+ * @param Object headers Response headers
+ * @return Void
+ */
+ constructor (init) {
+ this[MAP] = Object.create(null)
- const parent = path.dirname(dir)
- // this is a platform independent way of checking if we're in the root
- // directory
- if (parent === dir) return Promise.resolve(original)
+ if (init instanceof Headers) {
+ const rawHeaders = init.raw()
+ const headerNames = Object.keys(rawHeaders)
- return new Promise((resolve, reject) => {
- fs.readdir(dir, (err, files) => {
- if (err) {
- // an error right away is a bad sign.
- // unless the prefix was simply a non
- // existent directory.
- if (err && dir === original && err.code !== 'ENOENT') {
- reject(err)
- } else {
- resolve(original)
+ for (const headerName of headerNames) {
+ for (const value of rawHeaders[headerName]) {
+ this.append(headerName, value)
}
- } else if (files.indexOf('node_modules') !== -1 ||
- files.indexOf('package.json') !== -1) {
- resolve(dir)
- } else {
- resolve(findPrefix_(parent, original))
}
- })
- })
-}
+ return
+ }
-/***/ }),
-/* 101 */,
-/* 102 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+ // We don't worry about converting prop to ByteString here as append()
+ // will handle it.
+ if (init == null) {
+ // no op
+ } else if (typeof init === 'object') {
+ const method = init[Symbol.iterator]
+ if (method != null) {
+ if (typeof method !== 'function') {
+ throw new TypeError('Header pairs must be iterable')
+ }
-"use strict";
+ // sequence>
+ // Note: per spec we have to first exhaust the lists then process them
+ const pairs = []
+ for (const pair of init) {
+ if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
+ throw new TypeError('Each header pair must be iterable')
+ }
+ pairs.push(Array.from(pair))
+ }
-// For internal use, subject to change.
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-// We use any as a valid input type
-/* eslint-disable @typescript-eslint/no-explicit-any */
-const fs = __importStar(__webpack_require__(747));
-const os = __importStar(__webpack_require__(87));
-const utils_1 = __webpack_require__(82);
-function issueCommand(command, message) {
- const filePath = process.env[`GITHUB_${command}`];
- if (!filePath) {
- throw new Error(`Unable to find environment variable for file command ${command}`);
+ for (const pair of pairs) {
+ if (pair.length !== 2) {
+ throw new TypeError('Each header pair must be a name/value tuple')
+ }
+ this.append(pair[0], pair[1])
+ }
+ } else {
+ // record
+ for (const key of Object.keys(init)) {
+ const value = init[key]
+ this.append(key, value)
+ }
+ }
+ } else {
+ throw new TypeError('Provided initializer must be an object')
}
- if (!fs.existsSync(filePath)) {
- throw new Error(`Missing file at path: ${filePath}`);
+
+ Object.defineProperty(this, Symbol.toStringTag, {
+ value: 'Headers',
+ writable: false,
+ enumerable: false,
+ configurable: true
+ })
+ }
+
+ /**
+ * Return first header value given name
+ *
+ * @param String name Header name
+ * @return Mixed
+ */
+ get (name) {
+ const list = this[MAP][sanitizeName(name)]
+ if (!list) {
+ return null
}
- fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
- encoding: 'utf8'
- });
-}
-exports.issueCommand = issueCommand;
-//# sourceMappingURL=file-command.js.map
-/***/ }),
-/* 103 */,
-/* 104 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+ return list.join(', ')
+ }
-"use strict";
+ /**
+ * Iterate over all headers
+ *
+ * @param Function callback Executed for each item with parameters (value, name, thisArg)
+ * @param Boolean thisArg `this` context for callback function
+ * @return Void
+ */
+ forEach (callback, thisArg) {
+ let pairs = getHeaderPairs(this)
+ let i = 0
+ while (i < pairs.length) {
+ const name = pairs[i][0]
+ const value = pairs[i][1]
+ callback.call(thisArg, value, name, this)
+ pairs = getHeaderPairs(this)
+ i++
+ }
+ }
+ /**
+ * Overwrite header values given name
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ set (name, value) {
+ this[MAP][sanitizeName(name)] = [sanitizeValue(value)]
+ }
-Object.defineProperty(exports, "__esModule", {
- value: true
-});
-exports.default = void 0;
+ /**
+ * Append a value onto existing header
+ *
+ * @param String name Header name
+ * @param String value Header value
+ * @return Void
+ */
+ append (name, value) {
+ if (!this.has(name)) {
+ this.set(name, value)
+ return
+ }
-var _validate = _interopRequireDefault(__webpack_require__(676));
+ this[MAP][sanitizeName(name)].push(sanitizeValue(value))
+ }
-function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+ /**
+ * Check for header name existence
+ *
+ * @param String name Header name
+ * @return Boolean
+ */
+ has (name) {
+ return !!this[MAP][sanitizeName(name)]
+ }
-function version(uuid) {
- if (!(0, _validate.default)(uuid)) {
- throw TypeError('Invalid UUID');
+ /**
+ * Delete all header values given name
+ *
+ * @param String name Header name
+ * @return Void
+ */
+ delete (name) {
+ delete this[MAP][sanitizeName(name)]
+ };
+
+ /**
+ * Return raw headers (non-spec api)
+ *
+ * @return Object
+ */
+ raw () {
+ return this[MAP]
}
- return parseInt(uuid.substr(14, 1), 16);
+ /**
+ * Get an iterator on keys.
+ *
+ * @return Iterator
+ */
+ keys () {
+ return createHeadersIterator(this, 'key')
+ }
+
+ /**
+ * Get an iterator on values.
+ *
+ * @return Iterator
+ */
+ values () {
+ return createHeadersIterator(this, 'value')
+ }
+
+ /**
+ * Get an iterator on entries.
+ *
+ * This is the default iterator of the Headers object.
+ *
+ * @return Iterator
+ */
+ [Symbol.iterator] () {
+ return createHeadersIterator(this, 'key+value')
+ }
}
+Headers.prototype.entries = Headers.prototype[Symbol.iterator]
+
+Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
+ value: 'HeadersPrototype',
+ writable: false,
+ enumerable: false,
+ configurable: true
+})
+
+function getHeaderPairs (headers, kind) {
+ const keys = Object.keys(headers[MAP]).sort()
+ return keys.map(
+ kind === 'key'
+ ? k => [k]
+ : k => [k, headers.get(k)]
+ )
+}
+
+const INTERNAL = Symbol('internal')
+
+function createHeadersIterator (target, kind) {
+ const iterator = Object.create(HeadersIteratorPrototype)
+ iterator[INTERNAL] = {
+ target,
+ kind,
+ index: 0
+ }
+ return iterator
+}
+
+const HeadersIteratorPrototype = Object.setPrototypeOf({
+ next () {
+ // istanbul ignore if
+ if (!this ||
+ Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
+ throw new TypeError('Value of `this` is not a HeadersIterator')
+ }
+
+ const target = this[INTERNAL].target
+ const kind = this[INTERNAL].kind
+ const index = this[INTERNAL].index
+ const values = getHeaderPairs(target, kind)
+ const len = values.length
+ if (index >= len) {
+ return {
+ value: undefined,
+ done: true
+ }
+ }
+
+ const pair = values[index]
+ this[INTERNAL].index = index + 1
+
+ let result
+ if (kind === 'key') {
+ result = pair[0]
+ } else if (kind === 'value') {
+ result = pair[1]
+ } else {
+ result = pair
+ }
+
+ return {
+ value: result,
+ done: false
+ }
+ }
+}, Object.getPrototypeOf(
+ Object.getPrototypeOf([][Symbol.iterator]())
+))
+
+Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
+ value: 'HeadersIterator',
+ writable: false,
+ enumerable: false,
+ configurable: true
+})
+
+module.exports = Headers
-var _default = version;
-exports.default = _default;
/***/ }),
-/* 105 */
+/* 69 */
/***/ (function(module) {
-/**
- * Convert array of 16 byte values to UUID string format of the form:
- * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
- */
-var byteToHex = [];
-for (var i = 0; i < 256; ++i) {
- byteToHex[i] = (i + 0x100).toString(16).substr(1);
-}
+// populates missing values
+module.exports = function(dst, src) {
-function bytesToUuid(buf, offset) {
- var i = offset || 0;
- var bth = byteToHex;
- // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4
- return ([
- bth[buf[i++]], bth[buf[i++]],
- bth[buf[i++]], bth[buf[i++]], '-',
- bth[buf[i++]], bth[buf[i++]], '-',
- bth[buf[i++]], bth[buf[i++]], '-',
- bth[buf[i++]], bth[buf[i++]], '-',
- bth[buf[i++]], bth[buf[i++]],
- bth[buf[i++]], bth[buf[i++]],
- bth[buf[i++]], bth[buf[i++]]
- ]).join('');
+ Object.keys(src).forEach(function(prop)
+ {
+ dst[prop] = dst[prop] || src[prop];
+ });
+
+ return dst;
+};
+
+
+/***/ }),
+/* 70 */
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=trace_state.js.map
+
+/***/ }),
+/* 71 */
+/***/ (function() {
+
+"use strict";
+
+if (typeof Symbol === undefined || !Symbol.asyncIterator) {
+ Symbol.asyncIterator = Symbol.for("Symbol.asyncIterator");
}
+//# sourceMappingURL=index.js.map
-module.exports = bytesToUuid;
+/***/ }),
+/* 72 */
+/***/ (function(module) {
+
+"use strict";
+
+
+const pTry = (fn, ...arguments_) => new Promise(resolve => {
+ resolve(fn(...arguments_));
+});
+
+module.exports = pTry;
+// TODO: remove this in the next major version
+module.exports.default = pTry;
/***/ }),
-/* 106 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+/* 73 */,
+/* 74 */,
+/* 75 */,
+/* 76 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-Object.defineProperty(exports, '__esModule', { value: true });
+function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-var tslib = __webpack_require__(422);
+var Buffer = __webpack_require__(254).Buffer;
+var util = __webpack_require__(669);
-var listenersMap = new WeakMap();
-var abortedMap = new WeakMap();
-/**
- * An aborter instance implements AbortSignal interface, can abort HTTP requests.
+function copyBuffer(src, target, offset) {
+ src.copy(target, offset);
+}
+
+module.exports = function () {
+ function BufferList() {
+ _classCallCheck(this, BufferList);
+
+ this.head = null;
+ this.tail = null;
+ this.length = 0;
+ }
+
+ BufferList.prototype.push = function push(v) {
+ var entry = { data: v, next: null };
+ if (this.length > 0) this.tail.next = entry;else this.head = entry;
+ this.tail = entry;
+ ++this.length;
+ };
+
+ BufferList.prototype.unshift = function unshift(v) {
+ var entry = { data: v, next: this.head };
+ if (this.length === 0) this.tail = entry;
+ this.head = entry;
+ ++this.length;
+ };
+
+ BufferList.prototype.shift = function shift() {
+ if (this.length === 0) return;
+ var ret = this.head.data;
+ if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
+ --this.length;
+ return ret;
+ };
+
+ BufferList.prototype.clear = function clear() {
+ this.head = this.tail = null;
+ this.length = 0;
+ };
+
+ BufferList.prototype.join = function join(s) {
+ if (this.length === 0) return '';
+ var p = this.head;
+ var ret = '' + p.data;
+ while (p = p.next) {
+ ret += s + p.data;
+ }return ret;
+ };
+
+ BufferList.prototype.concat = function concat(n) {
+ if (this.length === 0) return Buffer.alloc(0);
+ if (this.length === 1) return this.head.data;
+ var ret = Buffer.allocUnsafe(n >>> 0);
+ var p = this.head;
+ var i = 0;
+ while (p) {
+ copyBuffer(p.data, ret, i);
+ i += p.data.length;
+ p = p.next;
+ }
+ return ret;
+ };
+
+ return BufferList;
+}();
+
+if (util && util.inspect && util.inspect.custom) {
+ module.exports.prototype[util.inspect.custom] = function () {
+ var obj = util.inspect({ length: this.length });
+ return this.constructor.name + ' ' + obj;
+ };
+}
+
+/***/ }),
+/* 77 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+/*
+ * Copyright The OpenTelemetry Authors
*
- * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled.
- * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation
- * cannot or will not ever be cancelled.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
*
- * @example
- * // Abort without timeout
- * await doAsyncWork(AbortSignal.none);
+ * https://www.apache.org/licenses/LICENSE-2.0
*
- * @export
- * @class AbortSignal
- * @implements {AbortSignalLike}
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
*/
-var AbortSignal = /** @class */ (function () {
- function AbortSignal() {
- /**
- * onabort event listener.
- *
- * @memberof AbortSignal
- */
- this.onabort = null;
- listenersMap.set(this, []);
- abortedMap.set(this, false);
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ContextAPI = void 0;
+var context_base_1 = __webpack_require__(459);
+var global_utils_1 = __webpack_require__(976);
+var NOOP_CONTEXT_MANAGER = new context_base_1.NoopContextManager();
+/**
+ * Singleton object which represents the entry point to the OpenTelemetry Context API
+ */
+var ContextAPI = /** @class */ (function () {
+ /** Empty private constructor prevents end users from constructing a new instance of the API */
+ function ContextAPI() {
}
- Object.defineProperty(AbortSignal.prototype, "aborted", {
- /**
- * Status of whether aborted or not.
- *
- * @readonly
- * @type {boolean}
- * @memberof AbortSignal
- */
- get: function () {
- if (!abortedMap.has(this)) {
- throw new TypeError("Expected `this` to be an instance of AbortSignal.");
- }
- return abortedMap.get(this);
- },
- enumerable: true,
- configurable: true
- });
- Object.defineProperty(AbortSignal, "none", {
- /**
- * Creates a new AbortSignal instance that will never be aborted.
- *
- * @readonly
- * @static
- * @type {AbortSignal}
- * @memberof AbortSignal
- */
- get: function () {
- return new AbortSignal();
- },
- enumerable: true,
- configurable: true
- });
- /**
- * Added new "abort" event listener, only support "abort" event.
- *
- * @param {"abort"} _type Only support "abort" event
- * @param {(this: AbortSignalLike, ev: any) => any} listener
- * @memberof AbortSignal
- */
- AbortSignal.prototype.addEventListener = function (
- // tslint:disable-next-line:variable-name
- _type, listener) {
- if (!listenersMap.has(this)) {
- throw new TypeError("Expected `this` to be an instance of AbortSignal.");
+ /** Get the singleton instance of the Context API */
+ ContextAPI.getInstance = function () {
+ if (!this._instance) {
+ this._instance = new ContextAPI();
}
- var listeners = listenersMap.get(this);
- listeners.push(listener);
+ return this._instance;
};
/**
- * Remove "abort" event listener, only support "abort" event.
- *
- * @param {"abort"} _type Only support "abort" event
- * @param {(this: AbortSignalLike, ev: any) => any} listener
- * @memberof AbortSignal
+ * Set the current context manager. Returns the initialized context manager
*/
- AbortSignal.prototype.removeEventListener = function (
- // tslint:disable-next-line:variable-name
- _type, listener) {
- if (!listenersMap.has(this)) {
- throw new TypeError("Expected `this` to be an instance of AbortSignal.");
- }
- var listeners = listenersMap.get(this);
- var index = listeners.indexOf(listener);
- if (index > -1) {
- listeners.splice(index, 1);
+ ContextAPI.prototype.setGlobalContextManager = function (contextManager) {
+ if (global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY]) {
+ // global context manager has already been set
+ return this._getContextManager();
}
+ global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY] = global_utils_1.makeGetter(global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION, contextManager, NOOP_CONTEXT_MANAGER);
+ return contextManager;
};
/**
- * Dispatches a synthetic event to the AbortSignal.
+ * Get the currently active context
*/
- AbortSignal.prototype.dispatchEvent = function (_event) {
- throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.");
+ ContextAPI.prototype.active = function () {
+ return this._getContextManager().active();
};
- return AbortSignal;
-}());
-/**
- * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered.
- * Will try to trigger abort event for all linked AbortSignal nodes.
- *
- * - If there is a timeout, the timer will be cancelled.
- * - If aborted is true, nothing will happen.
- *
- * @returns
- * @internal
- */
-function abortSignal(signal) {
- if (signal.aborted) {
- return;
- }
- if (signal.onabort) {
- signal.onabort.call(signal);
- }
- var listeners = listenersMap.get(signal);
- if (listeners) {
- listeners.forEach(function (listener) {
- listener.call(signal, { type: "abort" });
- });
- }
- abortedMap.set(signal, true);
-}
-
-/**
- * This error is thrown when an asynchronous operation has been aborted.
- * Check for this error by testing the `name` that the name property of the
- * error matches `"AbortError"`.
- *
- * @example
- * const controller = new AbortController();
- * controller.abort();
- * try {
- * doAsyncWork(controller.signal)
- * } catch (e) {
- * if (e.name === 'AbortError') {
- * // handle abort error here.
- * }
- * }
- */
-var AbortError = /** @class */ (function (_super) {
- tslib.__extends(AbortError, _super);
- function AbortError(message) {
- var _this = _super.call(this, message) || this;
- _this.name = "AbortError";
- return _this;
- }
- return AbortError;
-}(Error));
-/**
- * An AbortController provides an AbortSignal and the associated controls to signal
- * that an asynchronous operation should be aborted.
- *
- * @example
- * // Abort an operation when another event fires
- * const controller = new AbortController();
- * const signal = controller.signal;
- * doAsyncWork(signal);
- * button.addEventListener('click', () => controller.abort());
- *
- * @example
- * // Share aborter cross multiple operations in 30s
- * // Upload the same data to 2 different data centers at the same time,
- * // abort another when any of them is finished
- * const controller = AbortController.withTimeout(30 * 1000);
- * doAsyncWork(controller.signal).then(controller.abort);
- * doAsyncWork(controller.signal).then(controller.abort);
- *
- * @example
- * // Cascaded aborting
- * // All operations can't take more than 30 seconds
- * const aborter = Aborter.timeout(30 * 1000);
- *
- * // Following 2 operations can't take more than 25 seconds
- * await doAsyncWork(aborter.withTimeout(25 * 1000));
- * await doAsyncWork(aborter.withTimeout(25 * 1000));
- *
- * @export
- * @class AbortController
- * @implements {AbortSignalLike}
- */
-var AbortController = /** @class */ (function () {
- function AbortController(parentSignals) {
- var _this = this;
- this._signal = new AbortSignal();
- if (!parentSignals) {
- return;
- }
- // coerce parentSignals into an array
- if (!Array.isArray(parentSignals)) {
- parentSignals = arguments;
- }
- for (var _i = 0, parentSignals_1 = parentSignals; _i < parentSignals_1.length; _i++) {
- var parentSignal = parentSignals_1[_i];
- // if the parent signal has already had abort() called,
- // then call abort on this signal as well.
- if (parentSignal.aborted) {
- this.abort();
- }
- else {
- // when the parent signal aborts, this signal should as well.
- parentSignal.addEventListener("abort", function () {
- _this.abort();
- });
- }
- }
- }
- Object.defineProperty(AbortController.prototype, "signal", {
- /**
- * The AbortSignal associated with this controller that will signal aborted
- * when the abort method is called on this controller.
- *
- * @readonly
- * @type {AbortSignal}
- * @memberof AbortController
- */
- get: function () {
- return this._signal;
- },
- enumerable: true,
- configurable: true
- });
/**
- * Signal that any operations passed this controller's associated abort signal
- * to cancel any remaining work and throw an `AbortError`.
+ * Execute a function with an active context
*
- * @memberof AbortController
+ * @param context context to be active during function execution
+ * @param fn function to execute in a context
*/
- AbortController.prototype.abort = function () {
- abortSignal(this._signal);
+ ContextAPI.prototype.with = function (context, fn) {
+ return this._getContextManager().with(context, fn);
};
/**
- * Creates a new AbortSignal instance that will abort after the provided ms.
+ * Bind a context to a target function or event emitter
*
- * @static
- * @params {number} ms Elapsed time in milliseconds to trigger an abort.
- * @returns {AbortSignal}
+ * @param target function or event emitter to bind
+ * @param context context to bind to the event emitter or function. Defaults to the currently active context
*/
- AbortController.timeout = function (ms) {
- var signal = new AbortSignal();
- var timer = setTimeout(abortSignal, ms, signal);
- // Prevent the active Timer from keeping the Node.js event loop active.
- if (typeof timer.unref === "function") {
- timer.unref();
- }
- return signal;
+ ContextAPI.prototype.bind = function (target, context) {
+ if (context === void 0) { context = this.active(); }
+ return this._getContextManager().bind(target, context);
};
- return AbortController;
+ ContextAPI.prototype._getContextManager = function () {
+ var _a, _b;
+ return ((_b = (_a = global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY]) === null || _a === void 0 ? void 0 : _a.call(global_utils_1._global, global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION)) !== null && _b !== void 0 ? _b : NOOP_CONTEXT_MANAGER);
+ };
+ /** Disable and remove the global context manager */
+ ContextAPI.prototype.disable = function () {
+ this._getContextManager().disable();
+ delete global_utils_1._global[global_utils_1.GLOBAL_CONTEXT_MANAGER_API_KEY];
+ };
+ return ContextAPI;
}());
-
-exports.AbortController = AbortController;
-exports.AbortError = AbortError;
-exports.AbortSignal = AbortSignal;
-//# sourceMappingURL=index.js.map
-
+exports.ContextAPI = ContextAPI;
+//# sourceMappingURL=context.js.map
/***/ }),
-/* 107 */
-/***/ (function(__unusedmodule, exports) {
+/* 78 */
+/***/ (function(module) {
"use strict";
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-//# sourceMappingURL=Observation.js.map
+
+if (typeof process === 'undefined' ||
+ !process.version ||
+ process.version.indexOf('v0.') === 0 ||
+ process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) {
+ module.exports = { nextTick: nextTick };
+} else {
+ module.exports = process
+}
+
+function nextTick(fn, arg1, arg2, arg3) {
+ if (typeof fn !== 'function') {
+ throw new TypeError('"callback" argument must be a function');
+ }
+ var len = arguments.length;
+ var args, i;
+ switch (len) {
+ case 0:
+ case 1:
+ return process.nextTick(fn);
+ case 2:
+ return process.nextTick(function afterTickOne() {
+ fn.call(null, arg1);
+ });
+ case 3:
+ return process.nextTick(function afterTickTwo() {
+ fn.call(null, arg1, arg2);
+ });
+ case 4:
+ return process.nextTick(function afterTickThree() {
+ fn.call(null, arg1, arg2, arg3);
+ });
+ default:
+ args = new Array(len - 1);
+ i = 0;
+ while (i < args.length) {
+ args[i++] = arguments[i];
+ }
+ return process.nextTick(function afterTick() {
+ fn.apply(null, args);
+ });
+ }
+}
+
+
/***/ }),
-/* 108 */,
-/* 109 */,
-/* 110 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+/* 79 */,
+/* 80 */
+/***/ (function(module) {
"use strict";
-// Do a two-pass walk, first to get the list of packages that need to be
-// bundled, then again to get the actual files and folders.
-// Keep a cache of node_modules content and package.json data, so that the
-// second walk doesn't have to re-do all the same work.
+module.exports = parseJson
+function parseJson (txt, reviver, context) {
+ context = context || 20
+ try {
+ return JSON.parse(txt, reviver)
+ } catch (e) {
+ if (typeof txt !== 'string') {
+ const isEmptyArray = Array.isArray(txt) && txt.length === 0
+ const errorMessage = 'Cannot parse ' +
+ (isEmptyArray ? 'an empty array' : String(txt))
+ throw new TypeError(errorMessage)
+ }
+ const syntaxErr = e.message.match(/^Unexpected token.*position\s+(\d+)/i)
+ const errIdx = syntaxErr
+ ? +syntaxErr[1]
+ : e.message.match(/^Unexpected end of JSON.*/i)
+ ? txt.length - 1
+ : null
+ if (errIdx != null) {
+ const start = errIdx <= context
+ ? 0
+ : errIdx - context
+ const end = errIdx + context >= txt.length
+ ? txt.length
+ : errIdx + context
+ e.message += ` while parsing near '${
+ start === 0 ? '' : '...'
+ }${txt.slice(start, end)}${
+ end === txt.length ? '' : '...'
+ }'`
+ } else {
+ e.message += ` while parsing '${txt.slice(0, context * 2)}'`
+ }
+ throw e
+ }
+}
-const bundleWalk = __webpack_require__(650)
-const BundleWalker = bundleWalk.BundleWalker
-const BundleWalkerSync = bundleWalk.BundleWalkerSync
-const ignoreWalk = __webpack_require__(418)
-const IgnoreWalker = ignoreWalk.Walker
-const IgnoreWalkerSync = ignoreWalk.WalkerSync
+/***/ }),
+/* 81 */,
+/* 82 */
+/***/ (function(__unusedmodule, exports) {
-const rootBuiltinRules = Symbol('root-builtin-rules')
-const packageNecessaryRules = Symbol('package-necessary-rules')
-const path = __webpack_require__(622)
+"use strict";
-const normalizePackageBin = __webpack_require__(787)
+// We use any as a valid input type
+/* eslint-disable @typescript-eslint/no-explicit-any */
+Object.defineProperty(exports, "__esModule", { value: true });
+/**
+ * Sanitizes an input into a string so it can be passed into issueCommand safely
+ * @param input input to sanitize into a string
+ */
+function toCommandValue(input) {
+ if (input === null || input === undefined) {
+ return '';
+ }
+ else if (typeof input === 'string' || input instanceof String) {
+ return input;
+ }
+ return JSON.stringify(input);
+}
+exports.toCommandValue = toCommandValue;
+//# sourceMappingURL=utils.js.map
-const defaultRules = [
- '.npmignore',
- '.gitignore',
- '**/.git',
- '**/.svn',
- '**/.hg',
- '**/CVS',
- '**/.git/**',
- '**/.svn/**',
- '**/.hg/**',
- '**/CVS/**',
- '/.lock-wscript',
- '/.wafpickle-*',
- '/build/config.gypi',
- 'npm-debug.log',
- '**/.npmrc',
- '.*.swp',
- '.DS_Store',
- '**/.DS_Store/**',
- '._*',
- '**/._*/**',
- '*.orig',
- '/package-lock.json',
- '/yarn.lock',
- 'archived-packages/**',
- 'core',
- '!core/',
- '!**/core/',
- '*.core',
- '*.vgcore',
- 'vgcore.*',
- 'core.+([0-9])',
-]
+/***/ }),
+/* 83 */,
+/* 84 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-// There may be others, but :?|<> are handled by node-tar
-const nameIsBadForWindows = file => /\*/.test(file)
+"use strict";
-// a decorator that applies our custom rules to an ignore walker
-const npmWalker = Class => class Walker extends Class {
- constructor (opt) {
- opt = opt || {}
+module.exports =
+function(Promise, PromiseArray, apiRejection) {
+var util = __webpack_require__(248);
+var RangeError = __webpack_require__(351).RangeError;
+var AggregateError = __webpack_require__(351).AggregateError;
+var isArray = util.isArray;
+var CANCELLATION = {};
- // the order in which rules are applied.
- opt.ignoreFiles = [
- rootBuiltinRules,
- 'package.json',
- '.npmignore',
- '.gitignore',
- packageNecessaryRules
- ]
- opt.includeEmpty = false
- opt.path = opt.path || process.cwd()
- const dirName = path.basename(opt.path)
- const parentName = path.basename(path.dirname(opt.path))
- opt.follow =
- dirName === 'node_modules' ||
- (parentName === 'node_modules' && /^@/.test(dirName))
- super(opt)
+function SomePromiseArray(values) {
+ this.constructor$(values);
+ this._howMany = 0;
+ this._unwrap = false;
+ this._initialized = false;
+}
+util.inherits(SomePromiseArray, PromiseArray);
- // ignore a bunch of things by default at the root level.
- // also ignore anything in node_modules, except bundled dependencies
- if (!this.parent) {
- this.bundled = opt.bundled || []
- this.bundledScopes = Array.from(new Set(
- this.bundled.filter(f => /^@/.test(f))
- .map(f => f.split('/')[0])))
- const rules = defaultRules.join('\n') + '\n'
- this.packageJsonCache = opt.packageJsonCache || new Map()
- super.onReadIgnoreFile(rootBuiltinRules, rules, _=>_)
- } else {
- this.bundled = []
- this.bundledScopes = []
- this.packageJsonCache = this.parent.packageJsonCache
+SomePromiseArray.prototype._init = function () {
+ if (!this._initialized) {
+ return;
}
- }
-
- onReaddir (entries) {
- if (!this.parent) {
- entries = entries.filter(e =>
- e !== '.git' &&
- !(e === 'node_modules' && this.bundled.length === 0)
- )
+ if (this._howMany === 0) {
+ this._resolve([]);
+ return;
}
- return super.onReaddir(entries)
- }
+ this._init$(undefined, -5);
+ var isArrayResolved = isArray(this._values);
+ if (!this._isResolved() &&
+ isArrayResolved &&
+ this._howMany > this._canPossiblyFulfill()) {
+ this._reject(this._getRangeError(this.length()));
+ }
+};
- filterEntry (entry, partial) {
- // get the partial path from the root of the walk
- const p = this.path.substr(this.root.length + 1)
- const pkgre = /^node_modules\/(@[^\/]+\/?[^\/]+|[^\/]+)(\/.*)?$/
- const isRoot = !this.parent
- const pkg = isRoot && pkgre.test(entry) ?
- entry.replace(pkgre, '$1') : null
- const rootNM = isRoot && entry === 'node_modules'
- const rootPJ = isRoot && entry === 'package.json'
+SomePromiseArray.prototype.init = function () {
+ this._initialized = true;
+ this._init();
+};
- return (
- // if we're in a bundled package, check with the parent.
- /^node_modules($|\/)/i.test(p) ? this.parent.filterEntry(
- this.basename + '/' + entry, partial)
+SomePromiseArray.prototype.setUnwrap = function () {
+ this._unwrap = true;
+};
- // if package is bundled, all files included
- // also include @scope dirs for bundled scoped deps
- // they'll be ignored if no files end up in them.
- // However, this only matters if we're in the root.
- // node_modules folders elsewhere, like lib/node_modules,
- // should be included normally unless ignored.
- : pkg ? -1 !== this.bundled.indexOf(pkg) ||
- -1 !== this.bundledScopes.indexOf(pkg)
+SomePromiseArray.prototype.howMany = function () {
+ return this._howMany;
+};
- // only walk top node_modules if we want to bundle something
- : rootNM ? !!this.bundled.length
+SomePromiseArray.prototype.setHowMany = function (count) {
+ this._howMany = count;
+};
- // always include package.json at the root.
- : rootPJ ? true
+SomePromiseArray.prototype._promiseFulfilled = function (value) {
+ this._addFulfilled(value);
+ if (this._fulfilled() === this.howMany()) {
+ this._values.length = this.howMany();
+ if (this.howMany() === 1 && this._unwrap) {
+ this._resolve(this._values[0]);
+ } else {
+ this._resolve(this._values);
+ }
+ return true;
+ }
+ return false;
- // otherwise, follow ignore-walk's logic
- : super.filterEntry(entry, partial)
- )
- }
+};
+SomePromiseArray.prototype._promiseRejected = function (reason) {
+ this._addRejected(reason);
+ return this._checkOutcome();
+};
- filterEntries () {
- if (this.ignoreRules['package.json'])
- this.ignoreRules['.gitignore'] = this.ignoreRules['.npmignore'] = null
- else if (this.ignoreRules['.npmignore'])
- this.ignoreRules['.gitignore'] = null
- this.filterEntries = super.filterEntries
- super.filterEntries()
- }
+SomePromiseArray.prototype._promiseCancelled = function () {
+ if (this._values instanceof Promise || this._values == null) {
+ return this._cancel();
+ }
+ this._addRejected(CANCELLATION);
+ return this._checkOutcome();
+};
- addIgnoreFile (file, then) {
- const ig = path.resolve(this.path, file)
- if (this.packageJsonCache.has(ig))
- this.onPackageJson(ig, this.packageJsonCache.get(ig), then)
- else
- super.addIgnoreFile(file, then)
- }
+SomePromiseArray.prototype._checkOutcome = function() {
+ if (this.howMany() > this._canPossiblyFulfill()) {
+ var e = new AggregateError();
+ for (var i = this.length(); i < this._values.length; ++i) {
+ if (this._values[i] !== CANCELLATION) {
+ e.push(this._values[i]);
+ }
+ }
+ if (e.length > 0) {
+ this._reject(e);
+ } else {
+ this._cancel();
+ }
+ return true;
+ }
+ return false;
+};
- onPackageJson (ig, pkg, then) {
- this.packageJsonCache.set(ig, pkg)
+SomePromiseArray.prototype._fulfilled = function () {
+ return this._totalResolved;
+};
- // if there's a bin, browser or main, make sure we don't ignore it
- // also, don't ignore the package.json itself!
- //
- // Weird side-effect of this: a readme (etc) file will be included
- // if it exists anywhere within a folder with a package.json file.
- // The original intent was only to include these files in the root,
- // but now users in the wild are dependent on that behavior for
- // localized documentation and other use cases. Adding a `/` to
- // these rules, while tempting and arguably more "correct", is a
- // breaking change.
- const rules = [
- pkg.browser ? '!' + pkg.browser : '',
- pkg.main ? '!' + pkg.main : '',
- '!package.json',
- '!npm-shrinkwrap.json',
- '!@(readme|copying|license|licence|notice|changes|changelog|history){,.*[^~$]}'
- ]
- if (pkg.bin) {
- // always an object, because normalized already
- for (const key in pkg.bin)
- rules.push('!' + pkg.bin[key])
+SomePromiseArray.prototype._rejected = function () {
+ return this._values.length - this.length();
+};
+
+SomePromiseArray.prototype._addRejected = function (reason) {
+ this._values.push(reason);
+};
+
+SomePromiseArray.prototype._addFulfilled = function (value) {
+ this._values[this._totalResolved++] = value;
+};
+
+SomePromiseArray.prototype._canPossiblyFulfill = function () {
+ return this.length() - this._rejected();
+};
+
+SomePromiseArray.prototype._getRangeError = function (count) {
+ var message = "Input array must contain at least " +
+ this._howMany + " items but contains only " + count + " items";
+ return new RangeError(message);
+};
+
+SomePromiseArray.prototype._resolveEmptyArray = function () {
+ this._reject(this._getRangeError(0));
+};
+
+function some(promises, howMany) {
+ if ((howMany | 0) !== howMany || howMany < 0) {
+ return apiRejection("expecting a positive integer\u000a\u000a See http://goo.gl/MqrFmX\u000a");
}
+ var ret = new SomePromiseArray(promises);
+ var promise = ret.promise();
+ ret.setHowMany(howMany);
+ ret.init();
+ return promise;
+}
- const data = rules.filter(f => f).join('\n') + '\n'
- super.onReadIgnoreFile(packageNecessaryRules, data, _=>_)
+Promise.some = function (promises, howMany) {
+ return some(promises, howMany);
+};
- if (Array.isArray(pkg.files))
- super.onReadIgnoreFile('package.json', '*\n' + pkg.files.map(
- f => '!' + f + '\n!' + f.replace(/\/+$/, '') + '/**'
- ).join('\n') + '\n', then)
- else
- then()
- }
+Promise.prototype.some = function (howMany) {
+ return some(this, howMany);
+};
- // override parent stat function to completely skip any filenames
- // that will break windows entirely.
- // XXX(isaacs) Next major version should make this an error instead.
- stat (entry, file, dir, then) {
- if (nameIsBadForWindows(entry))
- then()
- else
- super.stat(entry, file, dir, then)
- }
+Promise._SomePromiseArray = SomePromiseArray;
+};
- // override parent onstat function to nix all symlinks
- onstat (st, entry, file, dir, then) {
- if (st.isSymbolicLink())
- then()
- else
- super.onstat(st, entry, file, dir, then)
- }
- onReadIgnoreFile (file, data, then) {
- if (file === 'package.json')
- try {
- const ig = path.resolve(this.path, file)
- this.onPackageJson(ig, normalizePackageBin(JSON.parse(data)), then)
- } catch (er) {
- // ignore package.json files that are not json
- then()
- }
- else
- super.onReadIgnoreFile(file, data, then)
+/***/ }),
+/* 85 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+module.exports = __webpack_require__(260)
+
+
+/***/ }),
+/* 86 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var rng = __webpack_require__(139);
+var bytesToUuid = __webpack_require__(105);
+
+// **`v1()` - Generate time-based UUID**
+//
+// Inspired by https://github.com/LiosK/UUID.js
+// and http://docs.python.org/library/uuid.html
+
+var _nodeId;
+var _clockseq;
+
+// Previous uuid creation time
+var _lastMSecs = 0;
+var _lastNSecs = 0;
+
+// See https://github.com/uuidjs/uuid for API details
+function v1(options, buf, offset) {
+ var i = buf && offset || 0;
+ var b = buf || [];
+
+ options = options || {};
+ var node = options.node || _nodeId;
+ var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq;
+
+ // node and clockseq need to be initialized to random values if they're not
+ // specified. We do this lazily to minimize issues related to insufficient
+ // system entropy. See #189
+ if (node == null || clockseq == null) {
+ var seedBytes = rng();
+ if (node == null) {
+ // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
+ node = _nodeId = [
+ seedBytes[0] | 0x01,
+ seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]
+ ];
+ }
+ if (clockseq == null) {
+ // Per 4.2.2, randomize (14 bit) clockseq
+ clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
+ }
}
- sort (a, b) {
- return sort(a, b)
+ // UUID timestamps are 100 nano-second units since the Gregorian epoch,
+ // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
+ // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
+ // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
+ var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime();
+
+ // Per 4.2.1.2, use count of uuid's generated during the current clock
+ // cycle to simulate higher resolution clock
+ var nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1;
+
+ // Time since last uuid creation (in msecs)
+ var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000;
+
+ // Per 4.2.1.2, Bump clockseq on clock regression
+ if (dt < 0 && options.clockseq === undefined) {
+ clockseq = clockseq + 1 & 0x3fff;
}
-}
-class Walker extends npmWalker(IgnoreWalker) {
- walker (entry, then) {
- new Walker(this.walkerOpt(entry)).on('done', then).start()
+ // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
+ // time interval
+ if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
+ nsecs = 0;
}
-}
-class WalkerSync extends npmWalker(IgnoreWalkerSync) {
- walker (entry, then) {
- new WalkerSync(this.walkerOpt(entry)).start()
- then()
+ // Per 4.2.1.2 Throw error if too many uuids are requested
+ if (nsecs >= 10000) {
+ throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec');
}
-}
-const walk = (options, callback) => {
- options = options || {}
- const p = new Promise((resolve, reject) => {
- const bw = new BundleWalker(options)
- bw.on('done', bundled => {
- options.bundled = bundled
- options.packageJsonCache = bw.packageJsonCache
- new Walker(options).on('done', resolve).on('error', reject).start()
- })
- bw.start()
- })
- return callback ? p.then(res => callback(null, res), callback) : p
-}
+ _lastMSecs = msecs;
+ _lastNSecs = nsecs;
+ _clockseq = clockseq;
-const walkSync = options => {
- options = options || {}
- const bw = new BundleWalkerSync(options).start()
- options.bundled = bw.result
- options.packageJsonCache = bw.packageJsonCache
- const walker = new WalkerSync(options)
- walker.start()
- return walker.result
-}
+ // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
+ msecs += 12219292800000;
-// optimize for compressibility
-// extname, then basename, then locale alphabetically
-// https://twitter.com/isntitvacant/status/1131094910923231232
-const sort = (a, b) => {
- const exta = path.extname(a).toLowerCase()
- const extb = path.extname(b).toLowerCase()
- const basea = path.basename(a).toLowerCase()
- const baseb = path.basename(b).toLowerCase()
+ // `time_low`
+ var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
+ b[i++] = tl >>> 24 & 0xff;
+ b[i++] = tl >>> 16 & 0xff;
+ b[i++] = tl >>> 8 & 0xff;
+ b[i++] = tl & 0xff;
- return exta.localeCompare(extb) ||
- basea.localeCompare(baseb) ||
- a.localeCompare(b)
-}
+ // `time_mid`
+ var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff;
+ b[i++] = tmh >>> 8 & 0xff;
+ b[i++] = tmh & 0xff;
+
+ // `time_high_and_version`
+ b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
+ b[i++] = tmh >>> 16 & 0xff;
+ // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
+ b[i++] = clockseq >>> 8 | 0x80;
-module.exports = walk
-walk.sync = walkSync
-walk.Walker = Walker
-walk.WalkerSync = WalkerSync
+ // `clock_seq_low`
+ b[i++] = clockseq & 0xff;
+
+ // `node`
+ for (var n = 0; n < 6; ++n) {
+ b[i + n] = node[n];
+ }
+
+ return buf ? buf : bytesToUuid(b);
+}
+
+module.exports = v1;
/***/ }),
-/* 111 */
+/* 87 */
/***/ (function(module) {
-module.exports = bindActor
-function bindActor () {
- var args =
- Array.prototype.slice.call
- (arguments) // jswtf.
- , obj = null
- , fn
- if (typeof args[0] === "object") {
- obj = args.shift()
- fn = args.shift()
- if (typeof fn === "string")
- fn = obj[ fn ]
- } else fn = args.shift()
- return function (cb) {
- fn.apply(obj, args.concat(cb)) }
+module.exports = require("os");
+
+/***/ }),
+/* 88 */,
+/* 89 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+/*!
+ * Copyright (c) 2015, Salesforce.com, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * 3. Neither the name of Salesforce.com nor the names of its contributors may
+ * be used to endorse or promote products derived from this software without
+ * specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+const pubsuffix = __webpack_require__(562);
+
+// Gives the permutation of all possible domainMatch()es of a given domain. The
+// array is in shortest-to-longest order. Handy for indexing.
+const SPECIAL_USE_DOMAINS = ["local"]; // RFC 6761
+function permuteDomain(domain, allowSpecialUseDomain) {
+ let pubSuf = null;
+ if (allowSpecialUseDomain) {
+ const domainParts = domain.split(".");
+ if (SPECIAL_USE_DOMAINS.includes(domainParts[domainParts.length - 1])) {
+ pubSuf = `${domainParts[domainParts.length - 2]}.${
+ domainParts[domainParts.length - 1]
+ }`;
+ } else {
+ pubSuf = pubsuffix.getPublicSuffix(domain);
+ }
+ } else {
+ pubSuf = pubsuffix.getPublicSuffix(domain);
+ }
+
+ if (!pubSuf) {
+ return null;
+ }
+ if (pubSuf == domain) {
+ return [domain];
+ }
+
+ const prefix = domain.slice(0, -(pubSuf.length + 1)); // ".example.com"
+ const parts = prefix.split(".").reverse();
+ let cur = pubSuf;
+ const permutations = [cur];
+ while (parts.length) {
+ cur = `${parts.shift()}.${cur}`;
+ permutations.push(cur);
+ }
+ return permutations;
}
+exports.permuteDomain = permuteDomain;
+
/***/ }),
-/* 112 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+/* 90 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
-module.exports = __webpack_require__(146);
-module.exports.HttpsAgent = __webpack_require__(628);
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+var _v = _interopRequireDefault(__webpack_require__(241));
+
+var _sha = _interopRequireDefault(__webpack_require__(616));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+const v5 = (0, _v.default)('v5', 0x50, _sha.default);
+var _default = v5;
+exports.default = _default;
/***/ }),
-/* 113 */
+/* 91 */
/***/ (function(module, __unusedexports, __webpack_require__) {
-"use strict";
+var serialOrdered = __webpack_require__(892);
+// Public API
+module.exports = serial;
-module.exports = __webpack_require__(964)
+/**
+ * Runs iterator over provided array elements in series
+ *
+ * @param {array|object} list - array or object (named list) to iterate over
+ * @param {function} iterator - iterator to run
+ * @param {function} callback - invoked when all elements processed
+ * @returns {function} - jobs terminator
+ */
+function serial(list, iterator, callback)
+{
+ return serialOrdered(list, iterator, null, callback);
+}
/***/ }),
-/* 114 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+/* 92 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const http_client_1 = __webpack_require__(22);
-const auth_1 = __webpack_require__(733);
-const crypto = __importStar(__webpack_require__(417));
-const fs = __importStar(__webpack_require__(747));
-const url_1 = __webpack_require__(835);
-const utils = __importStar(__webpack_require__(15));
-const constants_1 = __webpack_require__(931);
-const downloadUtils_1 = __webpack_require__(251);
-const options_1 = __webpack_require__(538);
-const requestUtils_1 = __webpack_require__(899);
-const versionSalt = '1.0';
-function getCacheApiUrl(resource) {
- // Ideally we just use ACTIONS_CACHE_URL
- const baseUrl = (process.env['ACTIONS_CACHE_URL'] ||
- process.env['ACTIONS_RUNTIME_URL'] ||
- '').replace('pipelines', 'artifactcache');
- if (!baseUrl) {
- throw new Error('Cache Service Url not found, unable to restore cache.');
- }
- const url = `${baseUrl}_apis/artifactcache/${resource}`;
- core.debug(`Resource Url: ${url}`);
- return url;
-}
-function createAcceptHeader(type, apiVersion) {
- return `${type};api-version=${apiVersion}`;
-}
-function getRequestOptions() {
- const requestOptions = {
- headers: {
- Accept: createAcceptHeader('application/json', '6.0-preview.1')
- }
- };
- return requestOptions;
-}
-function createHttpClient() {
- const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '';
- const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
- return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
-}
-function getCacheVersion(paths, compressionMethod) {
- const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip
- ? []
- : [compressionMethod]);
- // Add salt to cache version to support breaking changes in cache entry
- components.push(versionSalt);
- return crypto
- .createHash('sha256')
- .update(components.join('|'))
- .digest('hex');
-}
-exports.getCacheVersion = getCacheVersion;
-function getCacheEntry(keys, paths, options) {
- return __awaiter(this, void 0, void 0, function* () {
- const httpClient = createHttpClient();
- const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
- const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
- const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
- if (response.statusCode === 204) {
- return null;
- }
- if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) {
- throw new Error(`Cache service responded with ${response.statusCode}`);
- }
- const cacheResult = response.result;
- const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
- if (!cacheDownloadUrl) {
- throw new Error('Cache not found.');
- }
- core.setSecret(cacheDownloadUrl);
- core.debug(`Cache Result:`);
- core.debug(JSON.stringify(cacheResult));
- return cacheResult;
- });
-}
-exports.getCacheEntry = getCacheEntry;
-function downloadCache(archiveLocation, archivePath, options) {
- return __awaiter(this, void 0, void 0, function* () {
- const archiveUrl = new url_1.URL(archiveLocation);
- const downloadOptions = options_1.getDownloadOptions(options);
- if (downloadOptions.useAzureSdk &&
- archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
- // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
- yield downloadUtils_1.downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions);
- }
- else {
- // Otherwise, download using the Actions http-client.
- yield downloadUtils_1.downloadCacheHttpClient(archiveLocation, archivePath);
- }
- });
-}
-exports.downloadCache = downloadCache;
-// Reserve Cache
-function reserveCache(key, paths, options) {
- var _a, _b;
- return __awaiter(this, void 0, void 0, function* () {
- const httpClient = createHttpClient();
- const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
- const reserveCacheRequest = {
- key,
- version
- };
- const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () {
- return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest);
- }));
- return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1;
- });
-}
-exports.reserveCache = reserveCache;
-function getContentRange(start, end) {
- // Format: `bytes start-end/filesize
- // start and end are inclusive
- // filesize can be *
- // For a 200 byte chunk starting at byte 0:
- // Content-Range: bytes 0-199/*
- return `bytes ${start}-${end}/*`;
-}
-function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
- return __awaiter(this, void 0, void 0, function* () {
- core.debug(`Uploading chunk of size ${end -
- start +
- 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
- const additionalHeaders = {
- 'Content-Type': 'application/octet-stream',
- 'Content-Range': getContentRange(start, end)
- };
- const uploadChunkResponse = yield requestUtils_1.retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () {
- return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders);
- }));
- if (!requestUtils_1.isSuccessStatusCode(uploadChunkResponse.message.statusCode)) {
- throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`);
- }
- });
-}
-function uploadFile(httpClient, cacheId, archivePath, options) {
- return __awaiter(this, void 0, void 0, function* () {
- // Upload Chunks
- const fileSize = fs.statSync(archivePath).size;
- const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
- const fd = fs.openSync(archivePath, 'r');
- const uploadOptions = options_1.getUploadOptions(options);
- const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency);
- const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize);
- const parallelUploads = [...new Array(concurrency).keys()];
- core.debug('Awaiting all uploads');
- let offset = 0;
- try {
- yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
- while (offset < fileSize) {
- const chunkSize = Math.min(fileSize - offset, maxChunkSize);
- const start = offset;
- const end = offset + chunkSize - 1;
- offset += maxChunkSize;
- yield uploadChunk(httpClient, resourceUrl, () => fs
- .createReadStream(archivePath, {
- fd,
- start,
- end,
- autoClose: false
- })
- .on('error', error => {
- throw new Error(`Cache upload failed because file read failed with ${error.message}`);
- }), start, end);
- }
- })));
- }
- finally {
- fs.closeSync(fd);
- }
- return;
- });
-}
-function commitCache(httpClient, cacheId, filesize) {
- return __awaiter(this, void 0, void 0, function* () {
- const commitCacheRequest = { size: filesize };
- return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () {
- return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
- }));
- });
-}
-function saveCache(cacheId, archivePath, options) {
- return __awaiter(this, void 0, void 0, function* () {
- const httpClient = createHttpClient();
- core.debug('Upload cache');
- yield uploadFile(httpClient, cacheId, archivePath, options);
- // Commit Cache
- core.debug('Commiting cache');
- const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
- const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
- if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
- throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
- }
- core.info('Cache saved successfully');
- });
-}
-exports.saveCache = saveCache;
-//# sourceMappingURL=cacheHttpClient.js.map
-/***/ }),
-/* 115 */,
-/* 116 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+// Description of supported double byte encodings and aliases.
+// Tables are not require()-d until they are needed to speed up library load.
+// require()-s are direct to support Browserify.
-"use strict";
+module.exports = {
+
+ // == Japanese/ShiftJIS ====================================================
+ // All japanese encodings are based on JIS X set of standards:
+ // JIS X 0201 - Single-byte encoding of ASCII + ¥ + Kana chars at 0xA1-0xDF.
+ // JIS X 0208 - Main set of 6879 characters, placed in 94x94 plane, to be encoded by 2 bytes.
+ // Has several variations in 1978, 1983, 1990 and 1997.
+ // JIS X 0212 - Supplementary plane of 6067 chars in 94x94 plane. 1990. Effectively dead.
+ // JIS X 0213 - Extension and modern replacement of 0208 and 0212. Total chars: 11233.
+ // 2 planes, first is superset of 0208, second - revised 0212.
+ // Introduced in 2000, revised 2004. Some characters are in Unicode Plane 2 (0x2xxxx)
+ // Byte encodings are:
+ // * Shift_JIS: Compatible with 0201, uses not defined chars in top half as lead bytes for double-byte
+ // encoding of 0208. Lead byte ranges: 0x81-0x9F, 0xE0-0xEF; Trail byte ranges: 0x40-0x7E, 0x80-0x9E, 0x9F-0xFC.
+ // Windows CP932 is a superset of Shift_JIS. Some companies added more chars, notably KDDI.
+ // * EUC-JP: Up to 3 bytes per character. Used mostly on *nixes.
+ // 0x00-0x7F - lower part of 0201
+ // 0x8E, 0xA1-0xDF - upper part of 0201
+ // (0xA1-0xFE)x2 - 0208 plane (94x94).
+ // 0x8F, (0xA1-0xFE)x2 - 0212 plane (94x94).
+ // * JIS X 208: 7-bit, direct encoding of 0208. Byte ranges: 0x21-0x7E (94 values). Uncommon.
+ // Used as-is in ISO2022 family.
+ // * ISO2022-JP: Stateful encoding, with escape sequences to switch between ASCII,
+ // 0201-1976 Roman, 0208-1978, 0208-1983.
+ // * ISO2022-JP-1: Adds esc seq for 0212-1990.
+ // * ISO2022-JP-2: Adds esc seq for GB2313-1980, KSX1001-1992, ISO8859-1, ISO8859-7.
+ // * ISO2022-JP-3: Adds esc seq for 0201-1976 Kana set, 0213-2000 Planes 1, 2.
+ // * ISO2022-JP-2004: Adds 0213-2004 Plane 1.
+ //
+ // After JIS X 0213 appeared, Shift_JIS-2004, EUC-JISX0213 and ISO2022-JP-2004 followed, with just changing the planes.
+ //
+ // Overall, it seems that it's a mess :( http://www8.plala.or.jp/tkubota1/unicode-symbols-map2.html
-const figgyPudding = __webpack_require__(122)
-const getStream = __webpack_require__(145)
-const npa = __webpack_require__(482)
-const npmFetch = __webpack_require__(789)
-const {PassThrough} = __webpack_require__(794)
-const validate = __webpack_require__(772)
+ 'shiftjis': {
+ type: '_dbcs',
+ table: function() { return __webpack_require__(546) },
+ encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E},
+ encodeSkipVals: [{from: 0xED40, to: 0xF940}],
+ },
+ 'csshiftjis': 'shiftjis',
+ 'mskanji': 'shiftjis',
+ 'sjis': 'shiftjis',
+ 'windows31j': 'shiftjis',
+ 'ms31j': 'shiftjis',
+ 'xsjis': 'shiftjis',
+ 'windows932': 'shiftjis',
+ 'ms932': 'shiftjis',
+ '932': 'shiftjis',
+ 'cp932': 'shiftjis',
-const AccessConfig = figgyPudding({
- Promise: {default: () => Promise}
-})
+ 'eucjp': {
+ type: '_dbcs',
+ table: function() { return __webpack_require__(701) },
+ encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E},
+ },
-const eu = encodeURIComponent
-const npar = spec => {
- spec = npa(spec)
- if (!spec.registry) {
- throw new Error('`spec` must be a registry spec')
- }
- return spec
-}
+ // TODO: KDDI extension to Shift_JIS
+ // TODO: IBM CCSID 942 = CP932, but F0-F9 custom chars and other char changes.
+ // TODO: IBM CCSID 943 = Shift_JIS = CP932 with original Shift_JIS lower 128 chars.
-const cmd = module.exports = {}
-cmd.public = (spec, opts) => setAccess(spec, 'public', opts)
-cmd.restricted = (spec, opts) => setAccess(spec, 'restricted', opts)
-function setAccess (spec, access, opts) {
- opts = AccessConfig(opts)
- return pwrap(opts, () => {
- spec = npar(spec)
- validate('OSO', [spec, access, opts])
- const uri = `/-/package/${eu(spec.name)}/access`
- return npmFetch(uri, opts.concat({
- method: 'POST',
- body: {access},
- spec
- }))
- }).then(res => res.body.resume() && true)
-}
+ // == Chinese/GBK ==========================================================
+ // http://en.wikipedia.org/wiki/GBK
+ // We mostly implement W3C recommendation: https://www.w3.org/TR/encoding/#gbk-encoder
-cmd.grant = (spec, entity, permissions, opts) => {
- opts = AccessConfig(opts)
- return pwrap(opts, () => {
- spec = npar(spec)
- const {scope, team} = splitEntity(entity)
- validate('OSSSO', [spec, scope, team, permissions, opts])
- if (permissions !== 'read-write' && permissions !== 'read-only') {
- throw new Error('`permissions` must be `read-write` or `read-only`. Got `' + permissions + '` instead')
- }
- const uri = `/-/team/${eu(scope)}/${eu(team)}/package`
- return npmFetch(uri, opts.concat({
- method: 'PUT',
- body: {package: spec.name, permissions},
- scope,
- spec,
- ignoreBody: true
- }))
- }).then(() => true)
-}
+ // Oldest GB2312 (1981, ~7600 chars) is a subset of CP936
+ 'gb2312': 'cp936',
+ 'gb231280': 'cp936',
+ 'gb23121980': 'cp936',
+ 'csgb2312': 'cp936',
+ 'csiso58gb231280': 'cp936',
+ 'euccn': 'cp936',
-cmd.revoke = (spec, entity, opts) => {
- opts = AccessConfig(opts)
- return pwrap(opts, () => {
- spec = npar(spec)
- const {scope, team} = splitEntity(entity)
- validate('OSSO', [spec, scope, team, opts])
- const uri = `/-/team/${eu(scope)}/${eu(team)}/package`
- return npmFetch(uri, opts.concat({
- method: 'DELETE',
- body: {package: spec.name},
- scope,
- spec,
- ignoreBody: true
- }))
- }).then(() => true)
-}
+ // Microsoft's CP936 is a subset and approximation of GBK.
+ 'windows936': 'cp936',
+ 'ms936': 'cp936',
+ '936': 'cp936',
+ 'cp936': {
+ type: '_dbcs',
+ table: function() { return __webpack_require__(680) },
+ },
-cmd.lsPackages = (entity, opts) => {
- opts = AccessConfig(opts)
- return pwrap(opts, () => {
- return getStream.array(
- cmd.lsPackages.stream(entity, opts)
- ).then(data => data.reduce((acc, [key, val]) => {
- if (!acc) {
- acc = {}
- }
- acc[key] = val
- return acc
- }, null))
- })
-}
+ // GBK (~22000 chars) is an extension of CP936 that added user-mapped chars and some other.
+ 'gbk': {
+ type: '_dbcs',
+ table: function() { return __webpack_require__(680).concat(__webpack_require__(810)) },
+ },
+ 'xgbk': 'gbk',
+ 'isoir58': 'gbk',
-cmd.lsPackages.stream = (entity, opts) => {
- validate('SO|SZ', [entity, opts])
- opts = AccessConfig(opts)
- const {scope, team} = splitEntity(entity)
- let uri
- if (team) {
- uri = `/-/team/${eu(scope)}/${eu(team)}/package`
- } else {
- uri = `/-/org/${eu(scope)}/package`
- }
- opts = opts.concat({
- query: {format: 'cli'},
- mapJson (value, [key]) {
- if (value === 'read') {
- return [key, 'read-only']
- } else if (value === 'write') {
- return [key, 'read-write']
- } else {
- return [key, value]
- }
- }
- })
- const ret = new PassThrough({objectMode: true})
- npmFetch.json.stream(uri, '*', opts).on('error', err => {
- if (err.code === 'E404' && !team) {
- uri = `/-/user/${eu(scope)}/package`
- npmFetch.json.stream(uri, '*', opts).on(
- 'error', err => ret.emit('error', err)
- ).pipe(ret)
- } else {
- ret.emit('error', err)
- }
- }).pipe(ret)
- return ret
-}
+ // GB18030 is an algorithmic extension of GBK.
+ // Main source: https://www.w3.org/TR/encoding/#gbk-encoder
+ // http://icu-project.org/docs/papers/gb18030.html
+ // http://source.icu-project.org/repos/icu/data/trunk/charset/data/xml/gb-18030-2000.xml
+ // http://www.khngai.com/chinese/charmap/tblgbk.php?page=0
+ 'gb18030': {
+ type: '_dbcs',
+ table: function() { return __webpack_require__(680).concat(__webpack_require__(810)) },
+ gb18030: function() { return __webpack_require__(829) },
+ encodeSkipVals: [0x80],
+ encodeAdd: {'€': 0xA2E3},
+ },
-cmd.lsCollaborators = (spec, user, opts) => {
- if (typeof user === 'object' && !opts) {
- opts = user
- user = undefined
- }
- opts = AccessConfig(opts)
- return pwrap(opts, () => {
- return getStream.array(
- cmd.lsCollaborators.stream(spec, user, opts)
- ).then(data => data.reduce((acc, [key, val]) => {
- if (!acc) {
- acc = {}
- }
- acc[key] = val
- return acc
- }, null))
- })
-}
+ 'chinese': 'gb18030',
-cmd.lsCollaborators.stream = (spec, user, opts) => {
- if (typeof user === 'object' && !opts) {
- opts = user
- user = undefined
- }
- opts = AccessConfig(opts)
- spec = npar(spec)
- validate('OSO|OZO', [spec, user, opts])
- const uri = `/-/package/${eu(spec.name)}/collaborators`
- return npmFetch.json.stream(uri, '*', opts.concat({
- query: {format: 'cli', user: user || undefined},
- mapJson (value, [key]) {
- if (value === 'read') {
- return [key, 'read-only']
- } else if (value === 'write') {
- return [key, 'read-write']
- } else {
- return [key, value]
- }
- }
- }))
-}
-cmd.tfaRequired = (spec, opts) => setRequires2fa(spec, true, opts)
-cmd.tfaNotRequired = (spec, opts) => setRequires2fa(spec, false, opts)
-function setRequires2fa (spec, required, opts) {
- opts = AccessConfig(opts)
- return new opts.Promise((resolve, reject) => {
- spec = npar(spec)
- validate('OBO', [spec, required, opts])
- const uri = `/-/package/${eu(spec.name)}/access`
- return npmFetch(uri, opts.concat({
- method: 'POST',
- body: {publish_requires_tfa: required},
- spec,
- ignoreBody: true
- })).then(resolve, reject)
- }).then(() => true)
-}
+ // == Korean ===============================================================
+ // EUC-KR, KS_C_5601 and KS X 1001 are exactly the same.
+ 'windows949': 'cp949',
+ 'ms949': 'cp949',
+ '949': 'cp949',
+ 'cp949': {
+ type: '_dbcs',
+ table: function() { return __webpack_require__(488) },
+ },
-cmd.edit = () => {
- throw new Error('Not implemented yet')
-}
+ 'cseuckr': 'cp949',
+ 'csksc56011987': 'cp949',
+ 'euckr': 'cp949',
+ 'isoir149': 'cp949',
+ 'korean': 'cp949',
+ 'ksc56011987': 'cp949',
+ 'ksc56011989': 'cp949',
+ 'ksc5601': 'cp949',
-function splitEntity (entity = '') {
- let [, scope, team] = entity.match(/^@?([^:]+)(?::(.*))?$/) || []
- return {scope, team}
-}
-function pwrap (opts, fn) {
- return new opts.Promise((resolve, reject) => {
- fn().then(resolve, reject)
- })
-}
+ // == Big5/Taiwan/Hong Kong ================================================
+ // There are lots of tables for Big5 and cp950. Please see the following links for history:
+ // http://moztw.org/docs/big5/ http://www.haible.de/bruno/charsets/conversion-tables/Big5.html
+ // Variations, in roughly number of defined chars:
+ // * Windows CP 950: Microsoft variant of Big5. Canonical: http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP950.TXT
+ // * Windows CP 951: Microsoft variant of Big5-HKSCS-2001. Seems to be never public. http://me.abelcheung.org/articles/research/what-is-cp951/
+ // * Big5-2003 (Taiwan standard) almost superset of cp950.
+ // * Unicode-at-on (UAO) / Mozilla 1.8. Falling out of use on the Web. Not supported by other browsers.
+ // * Big5-HKSCS (-2001, -2004, -2008). Hong Kong standard.
+ // many unicode code points moved from PUA to Supplementary plane (U+2XXXX) over the years.
+ // Plus, it has 4 combining sequences.
+ // Seems that Mozilla refused to support it for 10 yrs. https://bugzilla.mozilla.org/show_bug.cgi?id=162431 https://bugzilla.mozilla.org/show_bug.cgi?id=310299
+ // because big5-hkscs is the only encoding to include astral characters in non-algorithmic way.
+ // Implementations are not consistent within browsers; sometimes labeled as just big5.
+ // MS Internet Explorer switches from big5 to big5-hkscs when a patch applied.
+ // Great discussion & recap of what's going on https://bugzilla.mozilla.org/show_bug.cgi?id=912470#c31
+ // In the encoder, it might make sense to support encoding old PUA mappings to Big5 bytes seq-s.
+ // Official spec: http://www.ogcio.gov.hk/en/business/tech_promotion/ccli/terms/doc/2003cmp_2008.txt
+ // http://www.ogcio.gov.hk/tc/business/tech_promotion/ccli/terms/doc/hkscs-2008-big5-iso.txt
+ //
+ // Current understanding of how to deal with Big5(-HKSCS) is in the Encoding Standard, http://encoding.spec.whatwg.org/#big5-encoder
+ // Unicode mapping (http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/OTHER/BIG5.TXT) is said to be wrong.
+ 'windows950': 'cp950',
+ 'ms950': 'cp950',
+ '950': 'cp950',
+ 'cp950': {
+ type: '_dbcs',
+ table: function() { return __webpack_require__(801) },
+ },
-/***/ }),
-/* 117 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+ // Big5 has many variations and is an extension of cp950. We use Encoding Standard's as a consensus.
+ 'big5': 'big5hkscs',
+ 'big5hkscs': {
+ type: '_dbcs',
+ table: function() { return __webpack_require__(801).concat(__webpack_require__(958)) },
+ encodeSkipVals: [0xa2cc],
+ },
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-var pathModule = __webpack_require__(622);
-var isWindows = process.platform === 'win32';
-var fs = __webpack_require__(747);
+ 'cnbig5': 'big5hkscs',
+ 'csbig5': 'big5hkscs',
+ 'xxbig5': 'big5hkscs',
+};
-// JavaScript implementation of realpath, ported from node pre-v6
-var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG);
+/***/ }),
+/* 93 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-function rethrow() {
- // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
- // is fairly slow to generate.
- var callback;
- if (DEBUG) {
- var backtrace = new Error;
- callback = debugCallback;
- } else
- callback = missingCallback;
+module.exports = minimatch
+minimatch.Minimatch = Minimatch
- return callback;
+var path = { sep: '/' }
+try {
+ path = __webpack_require__(622)
+} catch (er) {}
- function debugCallback(err) {
- if (err) {
- backtrace.message = err.message;
- err = backtrace;
- missingCallback(err);
- }
- }
+var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
+var expand = __webpack_require__(306)
- function missingCallback(err) {
- if (err) {
- if (process.throwDeprecation)
- throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs
- else if (!process.noDeprecation) {
- var msg = 'fs: missing callback ' + (err.stack || err.message);
- if (process.traceDeprecation)
- console.trace(msg);
- else
- console.error(msg);
- }
- }
- }
+var plTypes = {
+ '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
+ '?': { open: '(?:', close: ')?' },
+ '+': { open: '(?:', close: ')+' },
+ '*': { open: '(?:', close: ')*' },
+ '@': { open: '(?:', close: ')' }
}
-function maybeCallback(cb) {
- return typeof cb === 'function' ? cb : rethrow();
-}
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+var qmark = '[^/]'
-var normalize = pathModule.normalize;
+// * => any number of characters
+var star = qmark + '*?'
-// Regexp that finds the next partion of a (partial) path
-// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
-if (isWindows) {
- var nextPartRe = /(.*?)(?:[\/\\]+|$)/g;
-} else {
- var nextPartRe = /(.*?)(?:[\/]+|$)/g;
-}
+// ** when dots are allowed. Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
-// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
-if (isWindows) {
- var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/;
-} else {
- var splitRootRe = /^[\/]*/;
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
+
+// characters that need to be escaped in RegExp.
+var reSpecials = charSet('().*{}+?[]^$\\!')
+
+// "abc" -> { a:true, b:true, c:true }
+function charSet (s) {
+ return s.split('').reduce(function (set, c) {
+ set[c] = true
+ return set
+ }, {})
}
-exports.realpathSync = function realpathSync(p, cache) {
- // make p is absolute
- p = pathModule.resolve(p);
+// normalizes slashes.
+var slashSplit = /\/+/
- if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
- return cache[p];
+minimatch.filter = filter
+function filter (pattern, options) {
+ options = options || {}
+ return function (p, i, list) {
+ return minimatch(p, pattern, options)
}
+}
- var original = p,
- seenLinks = {},
- knownHard = {};
+function ext (a, b) {
+ a = a || {}
+ b = b || {}
+ var t = {}
+ Object.keys(b).forEach(function (k) {
+ t[k] = b[k]
+ })
+ Object.keys(a).forEach(function (k) {
+ t[k] = a[k]
+ })
+ return t
+}
- // current character position in p
- var pos;
- // the partial path so far, including a trailing slash if any
- var current;
- // the partial path without a trailing slash (except when pointing at a root)
- var base;
- // the partial path scanned in the previous round, with slash
- var previous;
+minimatch.defaults = function (def) {
+ if (!def || !Object.keys(def).length) return minimatch
- start();
+ var orig = minimatch
- function start() {
- // Skip over roots
- var m = splitRootRe.exec(p);
- pos = m[0].length;
- current = m[0];
- base = m[0];
- previous = '';
+ var m = function minimatch (p, pattern, options) {
+ return orig.minimatch(p, pattern, ext(def, options))
+ }
- // On windows, check that the root exists. On unix there is no need.
- if (isWindows && !knownHard[base]) {
- fs.lstatSync(base);
- knownHard[base] = true;
- }
+ m.Minimatch = function Minimatch (pattern, options) {
+ return new orig.Minimatch(pattern, ext(def, options))
}
- // walk down the path, swapping out linked pathparts for their real
- // values
- // NB: p.length changes.
- while (pos < p.length) {
- // find the next part
- nextPartRe.lastIndex = pos;
- var result = nextPartRe.exec(p);
- previous = current;
- current += result[0];
- base = previous + result[1];
- pos = nextPartRe.lastIndex;
+ return m
+}
- // continue if not a symlink
- if (knownHard[base] || (cache && cache[base] === base)) {
- continue;
- }
+Minimatch.defaults = function (def) {
+ if (!def || !Object.keys(def).length) return Minimatch
+ return minimatch.defaults(def).Minimatch
+}
- var resolvedLink;
- if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
- // some known symbolic link. no need to stat again.
- resolvedLink = cache[base];
- } else {
- var stat = fs.lstatSync(base);
- if (!stat.isSymbolicLink()) {
- knownHard[base] = true;
- if (cache) cache[base] = base;
- continue;
- }
+function minimatch (p, pattern, options) {
+ if (typeof pattern !== 'string') {
+ throw new TypeError('glob pattern string required')
+ }
- // read the link if it wasn't read before
- // dev/ino always return 0 on windows, so skip the check.
- var linkTarget = null;
- if (!isWindows) {
- var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
- if (seenLinks.hasOwnProperty(id)) {
- linkTarget = seenLinks[id];
- }
- }
- if (linkTarget === null) {
- fs.statSync(base);
- linkTarget = fs.readlinkSync(base);
- }
- resolvedLink = pathModule.resolve(previous, linkTarget);
- // track this, if given a cache.
- if (cache) cache[base] = resolvedLink;
- if (!isWindows) seenLinks[id] = linkTarget;
- }
+ if (!options) options = {}
- // resolve the link, then start over
- p = pathModule.resolve(resolvedLink, p.slice(pos));
- start();
+ // shortcut: comments match nothing.
+ if (!options.nocomment && pattern.charAt(0) === '#') {
+ return false
}
- if (cache) cache[original] = p;
+ // "" only matches ""
+ if (pattern.trim() === '') return p === ''
- return p;
-};
+ return new Minimatch(pattern, options).match(p)
+}
+function Minimatch (pattern, options) {
+ if (!(this instanceof Minimatch)) {
+ return new Minimatch(pattern, options)
+ }
-exports.realpath = function realpath(p, cache, cb) {
- if (typeof cb !== 'function') {
- cb = maybeCallback(cache);
- cache = null;
+ if (typeof pattern !== 'string') {
+ throw new TypeError('glob pattern string required')
}
- // make p is absolute
- p = pathModule.resolve(p);
+ if (!options) options = {}
+ pattern = pattern.trim()
- if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
- return process.nextTick(cb.bind(null, null, cache[p]));
+ // windows support: need to use /, not \
+ if (path.sep !== '/') {
+ pattern = pattern.split(path.sep).join('/')
}
- var original = p,
- seenLinks = {},
- knownHard = {};
+ this.options = options
+ this.set = []
+ this.pattern = pattern
+ this.regexp = null
+ this.negate = false
+ this.comment = false
+ this.empty = false
- // current character position in p
- var pos;
- // the partial path so far, including a trailing slash if any
- var current;
- // the partial path without a trailing slash (except when pointing at a root)
- var base;
- // the partial path scanned in the previous round, with slash
- var previous;
+ // make the set of regexps etc.
+ this.make()
+}
- start();
+Minimatch.prototype.debug = function () {}
- function start() {
- // Skip over roots
- var m = splitRootRe.exec(p);
- pos = m[0].length;
- current = m[0];
- base = m[0];
- previous = '';
+Minimatch.prototype.make = make
+function make () {
+ // don't do it more than once.
+ if (this._made) return
- // On windows, check that the root exists. On unix there is no need.
- if (isWindows && !knownHard[base]) {
- fs.lstat(base, function(err) {
- if (err) return cb(err);
- knownHard[base] = true;
- LOOP();
- });
- } else {
- process.nextTick(LOOP);
- }
+ var pattern = this.pattern
+ var options = this.options
+
+ // empty patterns and comments match nothing.
+ if (!options.nocomment && pattern.charAt(0) === '#') {
+ this.comment = true
+ return
+ }
+ if (!pattern) {
+ this.empty = true
+ return
}
- // walk down the path, swapping out linked pathparts for their real
- // values
- function LOOP() {
- // stop if scanned past end of path
- if (pos >= p.length) {
- if (cache) cache[original] = p;
- return cb(null, p);
- }
+ // step 1: figure out negation, etc.
+ this.parseNegate()
- // find the next part
- nextPartRe.lastIndex = pos;
- var result = nextPartRe.exec(p);
- previous = current;
- current += result[0];
- base = previous + result[1];
- pos = nextPartRe.lastIndex;
+ // step 2: expand braces
+ var set = this.globSet = this.braceExpand()
- // continue if not a symlink
- if (knownHard[base] || (cache && cache[base] === base)) {
- return process.nextTick(LOOP);
- }
+ if (options.debug) this.debug = console.error
- if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
- // known symbolic link. no need to stat again.
- return gotResolvedLink(cache[base]);
- }
+ this.debug(this.pattern, set)
- return fs.lstat(base, gotStat);
+ // step 3: now we have a set, so turn each one into a series of path-portion
+ // matching patterns.
+ // These will be regexps, except in the case of "**", which is
+ // set to the GLOBSTAR object for globstar behavior,
+ // and will not contain any / characters
+ set = this.globParts = set.map(function (s) {
+ return s.split(slashSplit)
+ })
+
+ this.debug(this.pattern, set)
+
+ // glob --> regexps
+ set = set.map(function (s, si, set) {
+ return s.map(this.parse, this)
+ }, this)
+
+ this.debug(this.pattern, set)
+
+ // filter out everything that didn't compile properly.
+ set = set.filter(function (s) {
+ return s.indexOf(false) === -1
+ })
+
+ this.debug(this.pattern, set)
+
+ this.set = set
+}
+
+Minimatch.prototype.parseNegate = parseNegate
+function parseNegate () {
+ var pattern = this.pattern
+ var negate = false
+ var options = this.options
+ var negateOffset = 0
+
+ if (options.nonegate) return
+
+ for (var i = 0, l = pattern.length
+ ; i < l && pattern.charAt(i) === '!'
+ ; i++) {
+ negate = !negate
+ negateOffset++
}
- function gotStat(err, stat) {
- if (err) return cb(err);
+ if (negateOffset) this.pattern = pattern.substr(negateOffset)
+ this.negate = negate
+}
- // if not a symlink, skip to the next path part
- if (!stat.isSymbolicLink()) {
- knownHard[base] = true;
- if (cache) cache[base] = base;
- return process.nextTick(LOOP);
- }
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+minimatch.braceExpand = function (pattern, options) {
+ return braceExpand(pattern, options)
+}
- // stat & read the link if not read before
- // call gotTarget as soon as the link target is known
- // dev/ino always return 0 on windows, so skip the check.
- if (!isWindows) {
- var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
- if (seenLinks.hasOwnProperty(id)) {
- return gotTarget(null, seenLinks[id], base);
- }
- }
- fs.stat(base, function(err) {
- if (err) return cb(err);
+Minimatch.prototype.braceExpand = braceExpand
- fs.readlink(base, function(err, target) {
- if (!isWindows) seenLinks[id] = target;
- gotTarget(err, target);
- });
- });
+function braceExpand (pattern, options) {
+ if (!options) {
+ if (this instanceof Minimatch) {
+ options = this.options
+ } else {
+ options = {}
+ }
}
- function gotTarget(err, target, base) {
- if (err) return cb(err);
+ pattern = typeof pattern === 'undefined'
+ ? this.pattern : pattern
- var resolvedLink = pathModule.resolve(previous, target);
- if (cache) cache[base] = resolvedLink;
- gotResolvedLink(resolvedLink);
+ if (typeof pattern === 'undefined') {
+ throw new TypeError('undefined pattern')
}
- function gotResolvedLink(resolvedLink) {
- // resolve the link, then start over
- p = pathModule.resolve(resolvedLink, p.slice(pos));
- start();
+ if (options.nobrace ||
+ !pattern.match(/\{.*\}/)) {
+ // shortcut. no need to expand.
+ return [pattern]
}
-};
+ return expand(pattern)
+}
-/***/ }),
-/* 118 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion. Otherwise, any series
+// of * is equivalent to a single *. Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+Minimatch.prototype.parse = parse
+var SUBPARSE = {}
+function parse (pattern, isSub) {
+ if (pattern.length > 1024 * 64) {
+ throw new TypeError('pattern is too long')
+ }
-"use strict";
+ var options = this.options
-Object.defineProperty(exports, "__esModule", { value: true });
-const utils_1 = __webpack_require__(756);
-// The default Buffer size if one is not provided.
-const DEFAULT_SMARTBUFFER_SIZE = 4096;
-// The default string encoding to use for reading/writing strings.
-const DEFAULT_SMARTBUFFER_ENCODING = 'utf8';
-class SmartBuffer {
- /**
- * Creates a new SmartBuffer instance.
- *
- * @param options { SmartBufferOptions } The SmartBufferOptions to apply to this instance.
- */
- constructor(options) {
- this.length = 0;
- this._encoding = DEFAULT_SMARTBUFFER_ENCODING;
- this._writeOffset = 0;
- this._readOffset = 0;
- if (SmartBuffer.isSmartBufferOptions(options)) {
- // Checks for encoding
- if (options.encoding) {
- utils_1.checkEncoding(options.encoding);
- this._encoding = options.encoding;
- }
- // Checks for initial size length
- if (options.size) {
- if (utils_1.isFiniteInteger(options.size) && options.size > 0) {
- this._buff = Buffer.allocUnsafe(options.size);
- }
- else {
- throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_SIZE);
- }
- // Check for initial Buffer
- }
- else if (options.buff) {
- if (options.buff instanceof Buffer) {
- this._buff = options.buff;
- this.length = options.buff.length;
- }
- else {
- throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_BUFFER);
- }
- }
- else {
- this._buff = Buffer.allocUnsafe(DEFAULT_SMARTBUFFER_SIZE);
- }
+ // shortcuts
+ if (!options.noglobstar && pattern === '**') return GLOBSTAR
+ if (pattern === '') return ''
+
+ var re = ''
+ var hasMagic = !!options.nocase
+ var escaping = false
+ // ? => one single character
+ var patternListStack = []
+ var negativeLists = []
+ var stateChar
+ var inClass = false
+ var reClassStart = -1
+ var classStart = -1
+ // . and .. never match anything that doesn't start with .,
+ // even when options.dot is set.
+ var patternStart = pattern.charAt(0) === '.' ? '' // anything
+ // not (start or / followed by . or .. followed by / or end)
+ : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
+ : '(?!\\.)'
+ var self = this
+
+ function clearStateChar () {
+ if (stateChar) {
+ // we had some state-tracking character
+ // that wasn't consumed by this pass.
+ switch (stateChar) {
+ case '*':
+ re += star
+ hasMagic = true
+ break
+ case '?':
+ re += qmark
+ hasMagic = true
+ break
+ default:
+ re += '\\' + stateChar
+ break
+ }
+ self.debug('clearStateChar %j %j', stateChar, re)
+ stateChar = false
+ }
+ }
+
+ for (var i = 0, len = pattern.length, c
+ ; (i < len) && (c = pattern.charAt(i))
+ ; i++) {
+ this.debug('%s\t%s %s %j', pattern, i, re, c)
+
+ // skip over any that are escaped.
+ if (escaping && reSpecials[c]) {
+ re += '\\' + c
+ escaping = false
+ continue
+ }
+
+ switch (c) {
+ case '/':
+ // completely not allowed, even escaped.
+ // Should already be path-split by now.
+ return false
+
+ case '\\':
+ clearStateChar()
+ escaping = true
+ continue
+
+ // the various stateChar values
+ // for the "extglob" stuff.
+ case '?':
+ case '*':
+ case '+':
+ case '@':
+ case '!':
+ this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)
+
+ // all of those are literals inside a class, except that
+ // the glob [!a] means [^a] in regexp
+ if (inClass) {
+ this.debug(' in class')
+ if (c === '!' && i === classStart + 1) c = '^'
+ re += c
+ continue
}
- else {
- // If something was passed but it's not a SmartBufferOptions object
- if (typeof options !== 'undefined') {
- throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_OBJECT);
- }
- // Otherwise default to sane options
- this._buff = Buffer.allocUnsafe(DEFAULT_SMARTBUFFER_SIZE);
+
+ // if we already have a stateChar, then it means
+ // that there was something like ** or +? in there.
+ // Handle the stateChar, then proceed with this one.
+ self.debug('call clearStateChar %j', stateChar)
+ clearStateChar()
+ stateChar = c
+ // if extglob is disabled, then +(asdf|foo) isn't a thing.
+ // just clear the statechar *now*, rather than even diving into
+ // the patternList stuff.
+ if (options.noext) clearStateChar()
+ continue
+
+ case '(':
+ if (inClass) {
+ re += '('
+ continue
+ }
+
+ if (!stateChar) {
+ re += '\\('
+ continue
+ }
+
+ patternListStack.push({
+ type: stateChar,
+ start: i - 1,
+ reStart: re.length,
+ open: plTypes[stateChar].open,
+ close: plTypes[stateChar].close
+ })
+ // negation is (?:(?!js)[^/]*)
+ re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
+ this.debug('plType %j %j', stateChar, re)
+ stateChar = false
+ continue
+
+ case ')':
+ if (inClass || !patternListStack.length) {
+ re += '\\)'
+ continue
+ }
+
+ clearStateChar()
+ hasMagic = true
+ var pl = patternListStack.pop()
+ // negation is (?:(?!js)[^/]*)
+ // The others are (?:)
+ re += pl.close
+ if (pl.type === '!') {
+ negativeLists.push(pl)
+ }
+ pl.reEnd = re.length
+ continue
+
+ case '|':
+ if (inClass || !patternListStack.length || escaping) {
+ re += '\\|'
+ escaping = false
+ continue
+ }
+
+ clearStateChar()
+ re += '|'
+ continue
+
+ // these are mostly the same in regexp and glob
+ case '[':
+ // swallow any state-tracking char before the [
+ clearStateChar()
+
+ if (inClass) {
+ re += '\\' + c
+ continue
+ }
+
+ inClass = true
+ classStart = i
+ reClassStart = re.length
+ re += c
+ continue
+
+ case ']':
+ // a right bracket shall lose its special
+ // meaning and represent itself in
+ // a bracket expression if it occurs
+ // first in the list. -- POSIX.2 2.8.3.2
+ if (i === classStart + 1 || !inClass) {
+ re += '\\' + c
+ escaping = false
+ continue
+ }
+
+ // handle the case where we left a class open.
+ // "[z-a]" is valid, equivalent to "\[z-a\]"
+ if (inClass) {
+ // split where the last [ was, make sure we don't have
+ // an invalid re. if so, re-walk the contents of the
+ // would-be class to re-translate any characters that
+ // were passed through as-is
+ // TODO: It would probably be faster to determine this
+ // without a try/catch and a new RegExp, but it's tricky
+ // to do safely. For now, this is safe and works.
+ var cs = pattern.substring(classStart + 1, i)
+ try {
+ RegExp('[' + cs + ']')
+ } catch (er) {
+ // not a valid class!
+ var sp = this.parse(cs, SUBPARSE)
+ re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
+ hasMagic = hasMagic || sp[1]
+ inClass = false
+ continue
+ }
+ }
+
+ // finish up the class.
+ hasMagic = true
+ inClass = false
+ re += c
+ continue
+
+ default:
+ // swallow any state char that wasn't consumed
+ clearStateChar()
+
+ if (escaping) {
+ // no need
+ escaping = false
+ } else if (reSpecials[c]
+ && !(c === '^' && inClass)) {
+ re += '\\'
}
+
+ re += c
+
+ } // switch
+ } // for
+
+ // handle the case where we left a class open.
+ // "[abc" is valid, equivalent to "\[abc"
+ if (inClass) {
+ // split where the last [ was, and escape it
+ // this is a huge pita. We now have to re-walk
+ // the contents of the would-be class to re-translate
+ // any characters that were passed through as-is
+ cs = pattern.substr(classStart + 1)
+ sp = this.parse(cs, SUBPARSE)
+ re = re.substr(0, reClassStart) + '\\[' + sp[0]
+ hasMagic = hasMagic || sp[1]
+ }
+
+ // handle the case where we had a +( thing at the *end*
+ // of the pattern.
+ // each pattern list stack adds 3 chars, and we need to go through
+ // and escape any | chars that were passed through as-is for the regexp.
+ // Go through and escape them, taking care not to double-escape any
+ // | chars that were already escaped.
+ for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
+ var tail = re.slice(pl.reStart + pl.open.length)
+ this.debug('setting tail', re, pl)
+ // maybe some even number of \, then maybe 1 \, followed by a |
+ tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) {
+ if (!$2) {
+ // the | isn't already escaped, so escape it.
+ $2 = '\\'
+ }
+
+ // need to escape all those slashes *again*, without escaping the
+ // one that we need for escaping the | character. As it works out,
+ // escaping an even number of slashes can be done by simply repeating
+ // it exactly after itself. That's why this trick works.
+ //
+ // I am sorry that you have to see this.
+ return $1 + $1 + $2 + '|'
+ })
+
+ this.debug('tail=%j\n %s', tail, tail, pl, re)
+ var t = pl.type === '*' ? star
+ : pl.type === '?' ? qmark
+ : '\\' + pl.type
+
+ hasMagic = true
+ re = re.slice(0, pl.reStart) + t + '\\(' + tail
+ }
+
+ // handle trailing things that only matter at the very end.
+ clearStateChar()
+ if (escaping) {
+ // trailing \\
+ re += '\\\\'
+ }
+
+ // only need to apply the nodot start if the re starts with
+ // something that could conceivably capture a dot
+ var addPatternStart = false
+ switch (re.charAt(0)) {
+ case '.':
+ case '[':
+ case '(': addPatternStart = true
+ }
+
+ // Hack to work around lack of negative lookbehind in JS
+ // A pattern like: *.!(x).!(y|z) needs to ensure that a name
+ // like 'a.xyz.yz' doesn't match. So, the first negative
+ // lookahead, has to look ALL the way ahead, to the end of
+ // the pattern.
+ for (var n = negativeLists.length - 1; n > -1; n--) {
+ var nl = negativeLists[n]
+
+ var nlBefore = re.slice(0, nl.reStart)
+ var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
+ var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)
+ var nlAfter = re.slice(nl.reEnd)
+
+ nlLast += nlAfter
+
+ // Handle nested stuff like *(*.js|!(*.json)), where open parens
+ // mean that we should *not* include the ) in the bit that is considered
+ // "after" the negated section.
+ var openParensBefore = nlBefore.split('(').length - 1
+ var cleanAfter = nlAfter
+ for (i = 0; i < openParensBefore; i++) {
+ cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
}
- /**
- * Creates a new SmartBuffer instance with the provided internal Buffer size and optional encoding.
- *
- * @param size { Number } The size of the internal Buffer.
- * @param encoding { String } The BufferEncoding to use for strings.
- *
- * @return { SmartBuffer }
- */
- static fromSize(size, encoding) {
- return new this({
- size: size,
- encoding: encoding
- });
+ nlAfter = cleanAfter
+
+ var dollar = ''
+ if (nlAfter === '' && isSub !== SUBPARSE) {
+ dollar = '$'
}
- /**
- * Creates a new SmartBuffer instance with the provided Buffer and optional encoding.
- *
- * @param buffer { Buffer } The Buffer to use as the internal Buffer value.
- * @param encoding { String } The BufferEncoding to use for strings.
- *
- * @return { SmartBuffer }
- */
- static fromBuffer(buff, encoding) {
- return new this({
- buff: buff,
- encoding: encoding
- });
+ var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
+ re = newRe
+ }
+
+ // if the re is not "" at this point, then we need to make sure
+ // it doesn't match against an empty path part.
+ // Otherwise a/* will match a/, which it should not.
+ if (re !== '' && hasMagic) {
+ re = '(?=.)' + re
+ }
+
+ if (addPatternStart) {
+ re = patternStart + re
+ }
+
+ // parsing just a piece of a larger pattern.
+ if (isSub === SUBPARSE) {
+ return [re, hasMagic]
+ }
+
+ // skip the regexp for non-magical patterns
+ // unescape anything in it, though, so that it'll be
+ // an exact match against a file etc.
+ if (!hasMagic) {
+ return globUnescape(pattern)
+ }
+
+ var flags = options.nocase ? 'i' : ''
+ try {
+ var regExp = new RegExp('^' + re + '$', flags)
+ } catch (er) {
+ // If it was an invalid regular expression, then it can't match
+ // anything. This trick looks for a character after the end of
+ // the string, which is of course impossible, except in multi-line
+ // mode, but it's not a /m regex.
+ return new RegExp('$.')
+ }
+
+ regExp._glob = pattern
+ regExp._src = re
+
+ return regExp
+}
+
+minimatch.makeRe = function (pattern, options) {
+ return new Minimatch(pattern, options || {}).makeRe()
+}
+
+Minimatch.prototype.makeRe = makeRe
+function makeRe () {
+ if (this.regexp || this.regexp === false) return this.regexp
+
+ // at this point, this.set is a 2d array of partial
+ // pattern strings, or "**".
+ //
+ // It's better to use .match(). This function shouldn't
+ // be used, really, but it's pretty convenient sometimes,
+ // when you just want to work with a regex.
+ var set = this.set
+
+ if (!set.length) {
+ this.regexp = false
+ return this.regexp
+ }
+ var options = this.options
+
+ var twoStar = options.noglobstar ? star
+ : options.dot ? twoStarDot
+ : twoStarNoDot
+ var flags = options.nocase ? 'i' : ''
+
+ var re = set.map(function (pattern) {
+ return pattern.map(function (p) {
+ return (p === GLOBSTAR) ? twoStar
+ : (typeof p === 'string') ? regExpEscape(p)
+ : p._src
+ }).join('\\\/')
+ }).join('|')
+
+ // must match entire pattern
+ // ending in a * or ** will make it less strict.
+ re = '^(?:' + re + ')$'
+
+ // can match anything, as long as it's not this.
+ if (this.negate) re = '^(?!' + re + ').*$'
+
+ try {
+ this.regexp = new RegExp(re, flags)
+ } catch (ex) {
+ this.regexp = false
+ }
+ return this.regexp
+}
+
+minimatch.match = function (list, pattern, options) {
+ options = options || {}
+ var mm = new Minimatch(pattern, options)
+ list = list.filter(function (f) {
+ return mm.match(f)
+ })
+ if (mm.options.nonull && !list.length) {
+ list.push(pattern)
+ }
+ return list
+}
+
+Minimatch.prototype.match = match
+function match (f, partial) {
+ this.debug('match', f, this.pattern)
+ // short-circuit in the case of busted things.
+ // comments, etc.
+ if (this.comment) return false
+ if (this.empty) return f === ''
+
+ if (f === '/' && partial) return true
+
+ var options = this.options
+
+ // windows: need to use /, not \
+ if (path.sep !== '/') {
+ f = f.split(path.sep).join('/')
+ }
+
+ // treat the test path as a set of pathparts.
+ f = f.split(slashSplit)
+ this.debug(this.pattern, 'split', f)
+
+ // just ONE of the pattern sets in this.set needs to match
+ // in order for it to be valid. If negating, then just one
+ // match means that we have failed.
+ // Either way, return on the first hit.
+
+ var set = this.set
+ this.debug(this.pattern, 'set', set)
+
+ // Find the basename of the path by looking for the last non-empty segment
+ var filename
+ var i
+ for (i = f.length - 1; i >= 0; i--) {
+ filename = f[i]
+ if (filename) break
+ }
+
+ for (i = 0; i < set.length; i++) {
+ var pattern = set[i]
+ var file = f
+ if (options.matchBase && pattern.length === 1) {
+ file = [filename]
}
- /**
- * Creates a new SmartBuffer instance with the provided SmartBufferOptions options.
- *
- * @param options { SmartBufferOptions } The options to use when creating the SmartBuffer instance.
- */
- static fromOptions(options) {
- return new this(options);
+ var hit = this.matchOne(file, pattern, partial)
+ if (hit) {
+ if (options.flipNegate) return true
+ return !this.negate
}
- /**
- * Type checking function that determines if an object is a SmartBufferOptions object.
- */
- static isSmartBufferOptions(options) {
- const castOptions = options;
- return (castOptions &&
- (castOptions.encoding !== undefined || castOptions.size !== undefined || castOptions.buff !== undefined));
- }
- // Signed integers
- /**
- * Reads an Int8 value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readInt8(offset) {
- return this._readNumberValue(Buffer.prototype.readInt8, 1, offset);
- }
- /**
- * Reads an Int16BE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readInt16BE(offset) {
- return this._readNumberValue(Buffer.prototype.readInt16BE, 2, offset);
- }
- /**
- * Reads an Int16LE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readInt16LE(offset) {
- return this._readNumberValue(Buffer.prototype.readInt16LE, 2, offset);
- }
- /**
- * Reads an Int32BE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readInt32BE(offset) {
- return this._readNumberValue(Buffer.prototype.readInt32BE, 4, offset);
- }
- /**
- * Reads an Int32LE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readInt32LE(offset) {
- return this._readNumberValue(Buffer.prototype.readInt32LE, 4, offset);
- }
- /**
- * Reads a BigInt64BE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { BigInt }
- */
- readBigInt64BE(offset) {
- utils_1.bigIntAndBufferInt64Check('readBigInt64BE');
- return this._readNumberValue(Buffer.prototype.readBigInt64BE, 8, offset);
- }
- /**
- * Reads a BigInt64LE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { BigInt }
- */
- readBigInt64LE(offset) {
- utils_1.bigIntAndBufferInt64Check('readBigInt64LE');
- return this._readNumberValue(Buffer.prototype.readBigInt64LE, 8, offset);
+ }
+
+ // didn't get any hits. this is success if it's a negative
+ // pattern, failure otherwise.
+ if (options.flipNegate) return false
+ return this.negate
+}
+
+// set partial to true to test if, for example,
+// "/a/b" matches the start of "/*/b/*/d"
+// Partial means, if you run out of file before you run
+// out of pattern, then that's fine, as long as all
+// the parts match.
+Minimatch.prototype.matchOne = function (file, pattern, partial) {
+ var options = this.options
+
+ this.debug('matchOne',
+ { 'this': this, file: file, pattern: pattern })
+
+ this.debug('matchOne', file.length, pattern.length)
+
+ for (var fi = 0,
+ pi = 0,
+ fl = file.length,
+ pl = pattern.length
+ ; (fi < fl) && (pi < pl)
+ ; fi++, pi++) {
+ this.debug('matchOne loop')
+ var p = pattern[pi]
+ var f = file[fi]
+
+ this.debug(pattern, p, f)
+
+ // should be impossible.
+ // some invalid regexp stuff in the set.
+ if (p === false) return false
+
+ if (p === GLOBSTAR) {
+ this.debug('GLOBSTAR', [pattern, p, f])
+
+ // "**"
+ // a/**/b/**/c would match the following:
+ // a/b/x/y/z/c
+ // a/x/y/z/b/c
+ // a/b/x/b/x/c
+ // a/b/c
+ // To do this, take the rest of the pattern after
+ // the **, and see if it would match the file remainder.
+ // If so, return success.
+ // If not, the ** "swallows" a segment, and try again.
+ // This is recursively awful.
+ //
+ // a/**/b/**/c matching a/b/x/y/z/c
+ // - a matches a
+ // - doublestar
+ // - matchOne(b/x/y/z/c, b/**/c)
+ // - b matches b
+ // - doublestar
+ // - matchOne(x/y/z/c, c) -> no
+ // - matchOne(y/z/c, c) -> no
+ // - matchOne(z/c, c) -> no
+ // - matchOne(c, c) yes, hit
+ var fr = fi
+ var pr = pi + 1
+ if (pr === pl) {
+ this.debug('** at the end')
+ // a ** at the end will just swallow the rest.
+ // We have found a match.
+ // however, it will not swallow /.x, unless
+ // options.dot is set.
+ // . and .. are *never* matched by **, for explosively
+ // exponential reasons.
+ for (; fi < fl; fi++) {
+ if (file[fi] === '.' || file[fi] === '..' ||
+ (!options.dot && file[fi].charAt(0) === '.')) return false
+ }
+ return true
+ }
+
+ // ok, let's see if we can swallow whatever we can.
+ while (fr < fl) {
+ var swallowee = file[fr]
+
+ this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)
+
+ // XXX remove this slice. Just pass the start index.
+ if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+ this.debug('globstar found match!', fr, fl, swallowee)
+ // found a match.
+ return true
+ } else {
+ // can't swallow "." or ".." ever.
+ // can only swallow ".foo" when explicitly asked.
+ if (swallowee === '.' || swallowee === '..' ||
+ (!options.dot && swallowee.charAt(0) === '.')) {
+ this.debug('dot detected!', file, fr, pattern, pr)
+ break
+ }
+
+ // ** swallows a segment, and continue.
+ this.debug('globstar swallow a segment, and continue')
+ fr++
+ }
+ }
+
+ // no match was found.
+ // However, in partial mode, we can't say this is necessarily over.
+ // If there's more *pattern* left, then
+ if (partial) {
+ // ran out of file
+ this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
+ if (fr === fl) return true
+ }
+ return false
}
- /**
- * Writes an Int8 value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt8(value, offset) {
- this._writeNumberValue(Buffer.prototype.writeInt8, 1, value, offset);
- return this;
+
+ // something other than **
+ // non-magic patterns just have to match exactly
+ // patterns with magic have been turned into regexps.
+ var hit
+ if (typeof p === 'string') {
+ if (options.nocase) {
+ hit = f.toLowerCase() === p.toLowerCase()
+ } else {
+ hit = f === p
+ }
+ this.debug('string match', p, f, hit)
+ } else {
+ hit = f.match(p)
+ this.debug('pattern match', p, f, hit)
}
- /**
- * Inserts an Int8 value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertInt8(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeInt8, 1, value, offset);
+
+ if (!hit) return false
+ }
+
+ // Note: ending in / means that we'll get a final ""
+ // at the end of the pattern. This can only match a
+ // corresponding "" at the end of the file.
+ // If the file ends in /, then it can only match a
+ // a pattern that ends in /, unless the pattern just
+ // doesn't have any more for it. But, a/b/ should *not*
+ // match "a/b/*", even though "" matches against the
+ // [^/]*? pattern, except in partial mode, where it might
+ // simply not be reached yet.
+ // However, a/b/ should still satisfy a/*
+
+ // now either we fell off the end of the pattern, or we're done.
+ if (fi === fl && pi === pl) {
+ // ran out of pattern and filename at the same time.
+ // an exact hit!
+ return true
+ } else if (fi === fl) {
+ // ran out of file, but still had pattern left.
+ // this is ok if we're doing the match as part of
+ // a glob fs traversal.
+ return partial
+ } else if (pi === pl) {
+ // ran out of pattern, still have file left.
+ // this is only acceptable if we're on the very last
+ // empty segment of a file with a trailing slash.
+ // a/* should match a/b/
+ var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
+ return emptyFileEnd
+ }
+
+ // should be unreachable.
+ throw new Error('wtf?')
+}
+
+// replace stuff like \* with *
+function globUnescape (s) {
+ return s.replace(/\\(.)/g, '$1')
+}
+
+function regExpEscape (s) {
+ return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
+}
+
+
+/***/ }),
+/* 94 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+var path = __webpack_require__(622)
+
+var uniqueSlug = __webpack_require__(336)
+
+module.exports = function (filepath, prefix, uniq) {
+ return path.join(filepath, (prefix ? prefix + '-' : '') + uniqueSlug(uniq))
+}
+
+
+/***/ }),
+/* 95 */
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=link.js.map
+
+/***/ }),
+/* 96 */,
+/* 97 */
+/***/ (function(module) {
+
+"use strict";
+
+/* eslint-disable yoda */
+module.exports = x => {
+ if (Number.isNaN(x)) {
+ return false;
+ }
+
+ // code points are derived from:
+ // http://www.unix.org/Public/UNIDATA/EastAsianWidth.txt
+ if (
+ x >= 0x1100 && (
+ x <= 0x115f || // Hangul Jamo
+ x === 0x2329 || // LEFT-POINTING ANGLE BRACKET
+ x === 0x232a || // RIGHT-POINTING ANGLE BRACKET
+ // CJK Radicals Supplement .. Enclosed CJK Letters and Months
+ (0x2e80 <= x && x <= 0x3247 && x !== 0x303f) ||
+ // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A
+ (0x3250 <= x && x <= 0x4dbf) ||
+ // CJK Unified Ideographs .. Yi Radicals
+ (0x4e00 <= x && x <= 0xa4c6) ||
+ // Hangul Jamo Extended-A
+ (0xa960 <= x && x <= 0xa97c) ||
+ // Hangul Syllables
+ (0xac00 <= x && x <= 0xd7a3) ||
+ // CJK Compatibility Ideographs
+ (0xf900 <= x && x <= 0xfaff) ||
+ // Vertical Forms
+ (0xfe10 <= x && x <= 0xfe19) ||
+ // CJK Compatibility Forms .. Small Form Variants
+ (0xfe30 <= x && x <= 0xfe6b) ||
+ // Halfwidth and Fullwidth Forms
+ (0xff01 <= x && x <= 0xff60) ||
+ (0xffe0 <= x && x <= 0xffe6) ||
+ // Kana Supplement
+ (0x1b000 <= x && x <= 0x1b001) ||
+ // Enclosed Ideographic Supplement
+ (0x1f200 <= x && x <= 0x1f251) ||
+ // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane
+ (0x20000 <= x && x <= 0x3fffd)
+ )
+ ) {
+ return true;
+ }
+
+ return false;
+};
+
+
+/***/ }),
+/* 98 */
+/***/ (function(module) {
+
+"use strict";
+
+
+module.exports = stringifyPackage
+
+const DEFAULT_INDENT = 2
+const CRLF = '\r\n'
+const LF = '\n'
+
+function stringifyPackage (data, indent, newline) {
+ indent = indent || (indent === 0 ? 0 : DEFAULT_INDENT)
+ const json = JSON.stringify(data, null, indent)
+
+ if (newline === CRLF) {
+ return json.replace(/\n/g, CRLF) + CRLF
+ }
+
+ return json + LF
+}
+
+
+/***/ }),
+/* 99 */,
+/* 100 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+// try to find the most reasonable prefix to use
+
+module.exports = findPrefix
+
+const fs = __webpack_require__(747)
+const path = __webpack_require__(622)
+
+function findPrefix (dir) {
+ return new Promise((resolve, reject) => {
+ dir = path.resolve(dir)
+
+ // this is a weird special case where an infinite recurse of
+ // node_modules folders resolves to the level that contains the
+ // very first node_modules folder
+ let walkedUp = false
+ while (path.basename(dir) === 'node_modules') {
+ dir = path.dirname(dir)
+ walkedUp = true
}
- /**
- * Writes an Int16BE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt16BE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeInt16BE, 2, value, offset);
+ if (walkedUp) {
+ resolve(dir)
+ } else {
+ resolve(findPrefix_(dir))
}
- /**
- * Inserts an Int16BE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertInt16BE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeInt16BE, 2, value, offset);
+ })
+}
+
+function findPrefix_ (dir, original) {
+ if (!original) original = dir
+
+ const parent = path.dirname(dir)
+ // this is a platform independent way of checking if we're in the root
+ // directory
+ if (parent === dir) return Promise.resolve(original)
+
+ return new Promise((resolve, reject) => {
+ fs.readdir(dir, (err, files) => {
+ if (err) {
+ // an error right away is a bad sign.
+ // unless the prefix was simply a non
+ // existent directory.
+ if (err && dir === original && err.code !== 'ENOENT') {
+ reject(err)
+ } else {
+ resolve(original)
+ }
+ } else if (files.indexOf('node_modules') !== -1 ||
+ files.indexOf('package.json') !== -1) {
+ resolve(dir)
+ } else {
+ resolve(findPrefix_(parent, original))
+ }
+ })
+ })
+}
+
+
+/***/ }),
+/* 101 */,
+/* 102 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+// For internal use, subject to change.
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+// We use any as a valid input type
+/* eslint-disable @typescript-eslint/no-explicit-any */
+const fs = __importStar(__webpack_require__(747));
+const os = __importStar(__webpack_require__(87));
+const utils_1 = __webpack_require__(82);
+function issueCommand(command, message) {
+ const filePath = process.env[`GITHUB_${command}`];
+ if (!filePath) {
+ throw new Error(`Unable to find environment variable for file command ${command}`);
}
- /**
- * Writes an Int16LE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt16LE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeInt16LE, 2, value, offset);
+ if (!fs.existsSync(filePath)) {
+ throw new Error(`Missing file at path: ${filePath}`);
}
- /**
- * Inserts an Int16LE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertInt16LE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeInt16LE, 2, value, offset);
- }
- /**
- * Writes an Int32BE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt32BE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeInt32BE, 4, value, offset);
- }
- /**
- * Inserts an Int32BE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertInt32BE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeInt32BE, 4, value, offset);
- }
- /**
- * Writes an Int32LE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeInt32LE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeInt32LE, 4, value, offset);
- }
- /**
- * Inserts an Int32LE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertInt32LE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeInt32LE, 4, value, offset);
- }
- /**
- * Writes a BigInt64BE value to the current write position (or at optional offset).
- *
- * @param value { BigInt } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeBigInt64BE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigInt64BE');
- return this._writeNumberValue(Buffer.prototype.writeBigInt64BE, 8, value, offset);
- }
- /**
- * Inserts a BigInt64BE value at the given offset value.
- *
- * @param value { BigInt } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertBigInt64BE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigInt64BE');
- return this._insertNumberValue(Buffer.prototype.writeBigInt64BE, 8, value, offset);
- }
- /**
- * Writes a BigInt64LE value to the current write position (or at optional offset).
- *
- * @param value { BigInt } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeBigInt64LE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigInt64LE');
- return this._writeNumberValue(Buffer.prototype.writeBigInt64LE, 8, value, offset);
- }
- /**
- * Inserts a Int64LE value at the given offset value.
- *
- * @param value { BigInt } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertBigInt64LE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigInt64LE');
- return this._insertNumberValue(Buffer.prototype.writeBigInt64LE, 8, value, offset);
- }
- // Unsigned Integers
- /**
- * Reads an UInt8 value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readUInt8(offset) {
- return this._readNumberValue(Buffer.prototype.readUInt8, 1, offset);
- }
- /**
- * Reads an UInt16BE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readUInt16BE(offset) {
- return this._readNumberValue(Buffer.prototype.readUInt16BE, 2, offset);
- }
- /**
- * Reads an UInt16LE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readUInt16LE(offset) {
- return this._readNumberValue(Buffer.prototype.readUInt16LE, 2, offset);
- }
- /**
- * Reads an UInt32BE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readUInt32BE(offset) {
- return this._readNumberValue(Buffer.prototype.readUInt32BE, 4, offset);
+ fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
+ encoding: 'utf8'
+ });
+}
+exports.issueCommand = issueCommand;
+//# sourceMappingURL=file-command.js.map
+
+/***/ }),
+/* 103 */,
+/* 104 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+var _validate = _interopRequireDefault(__webpack_require__(676));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function version(uuid) {
+ if (!(0, _validate.default)(uuid)) {
+ throw TypeError('Invalid UUID');
+ }
+
+ return parseInt(uuid.substr(14, 1), 16);
+}
+
+var _default = version;
+exports.default = _default;
+
+/***/ }),
+/* 105 */
+/***/ (function(module) {
+
+/**
+ * Convert array of 16 byte values to UUID string format of the form:
+ * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
+ */
+var byteToHex = [];
+for (var i = 0; i < 256; ++i) {
+ byteToHex[i] = (i + 0x100).toString(16).substr(1);
+}
+
+function bytesToUuid(buf, offset) {
+ var i = offset || 0;
+ var bth = byteToHex;
+ // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4
+ return ([
+ bth[buf[i++]], bth[buf[i++]],
+ bth[buf[i++]], bth[buf[i++]], '-',
+ bth[buf[i++]], bth[buf[i++]], '-',
+ bth[buf[i++]], bth[buf[i++]], '-',
+ bth[buf[i++]], bth[buf[i++]], '-',
+ bth[buf[i++]], bth[buf[i++]],
+ bth[buf[i++]], bth[buf[i++]],
+ bth[buf[i++]], bth[buf[i++]]
+ ]).join('');
+}
+
+module.exports = bytesToUuid;
+
+
+/***/ }),
+/* 106 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+var tslib = __webpack_require__(422);
+
+var listenersMap = new WeakMap();
+var abortedMap = new WeakMap();
+/**
+ * An aborter instance implements AbortSignal interface, can abort HTTP requests.
+ *
+ * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled.
+ * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation
+ * cannot or will not ever be cancelled.
+ *
+ * @example
+ * // Abort without timeout
+ * await doAsyncWork(AbortSignal.none);
+ *
+ * @export
+ * @class AbortSignal
+ * @implements {AbortSignalLike}
+ */
+var AbortSignal = /** @class */ (function () {
+ function AbortSignal() {
+ /**
+ * onabort event listener.
+ *
+ * @memberof AbortSignal
+ */
+ this.onabort = null;
+ listenersMap.set(this, []);
+ abortedMap.set(this, false);
}
+ Object.defineProperty(AbortSignal.prototype, "aborted", {
+ /**
+ * Status of whether aborted or not.
+ *
+ * @readonly
+ * @type {boolean}
+ * @memberof AbortSignal
+ */
+ get: function () {
+ if (!abortedMap.has(this)) {
+ throw new TypeError("Expected `this` to be an instance of AbortSignal.");
+ }
+ return abortedMap.get(this);
+ },
+ enumerable: true,
+ configurable: true
+ });
+ Object.defineProperty(AbortSignal, "none", {
+ /**
+ * Creates a new AbortSignal instance that will never be aborted.
+ *
+ * @readonly
+ * @static
+ * @type {AbortSignal}
+ * @memberof AbortSignal
+ */
+ get: function () {
+ return new AbortSignal();
+ },
+ enumerable: true,
+ configurable: true
+ });
/**
- * Reads an UInt32LE value from the current read position or an optionally provided offset.
+ * Added new "abort" event listener, only support "abort" event.
*
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
+ * @param {"abort"} _type Only support "abort" event
+ * @param {(this: AbortSignalLike, ev: any) => any} listener
+ * @memberof AbortSignal
*/
- readUInt32LE(offset) {
- return this._readNumberValue(Buffer.prototype.readUInt32LE, 4, offset);
- }
+ AbortSignal.prototype.addEventListener = function (
+ // tslint:disable-next-line:variable-name
+ _type, listener) {
+ if (!listenersMap.has(this)) {
+ throw new TypeError("Expected `this` to be an instance of AbortSignal.");
+ }
+ var listeners = listenersMap.get(this);
+ listeners.push(listener);
+ };
/**
- * Reads a BigUInt64BE value from the current read position or an optionally provided offset.
+ * Remove "abort" event listener, only support "abort" event.
*
- * @param offset { Number } The offset to read data from (optional)
- * @return { BigInt }
+ * @param {"abort"} _type Only support "abort" event
+ * @param {(this: AbortSignalLike, ev: any) => any} listener
+ * @memberof AbortSignal
*/
- readBigUInt64BE(offset) {
- utils_1.bigIntAndBufferInt64Check('readBigUInt64BE');
- return this._readNumberValue(Buffer.prototype.readBigUInt64BE, 8, offset);
- }
+ AbortSignal.prototype.removeEventListener = function (
+ // tslint:disable-next-line:variable-name
+ _type, listener) {
+ if (!listenersMap.has(this)) {
+ throw new TypeError("Expected `this` to be an instance of AbortSignal.");
+ }
+ var listeners = listenersMap.get(this);
+ var index = listeners.indexOf(listener);
+ if (index > -1) {
+ listeners.splice(index, 1);
+ }
+ };
/**
- * Reads a BigUInt64LE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { BigInt }
+ * Dispatches a synthetic event to the AbortSignal.
*/
- readBigUInt64LE(offset) {
- utils_1.bigIntAndBufferInt64Check('readBigUInt64LE');
- return this._readNumberValue(Buffer.prototype.readBigUInt64LE, 8, offset);
+ AbortSignal.prototype.dispatchEvent = function (_event) {
+ throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.");
+ };
+ return AbortSignal;
+}());
+/**
+ * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered.
+ * Will try to trigger abort event for all linked AbortSignal nodes.
+ *
+ * - If there is a timeout, the timer will be cancelled.
+ * - If aborted is true, nothing will happen.
+ *
+ * @returns
+ * @internal
+ */
+function abortSignal(signal) {
+ if (signal.aborted) {
+ return;
}
- /**
- * Writes an UInt8 value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeUInt8(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeUInt8, 1, value, offset);
+ if (signal.onabort) {
+ signal.onabort.call(signal);
}
- /**
- * Inserts an UInt8 value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertUInt8(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeUInt8, 1, value, offset);
+ var listeners = listenersMap.get(signal);
+ if (listeners) {
+ listeners.forEach(function (listener) {
+ listener.call(signal, { type: "abort" });
+ });
}
- /**
- * Writes an UInt16BE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeUInt16BE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeUInt16BE, 2, value, offset);
+ abortedMap.set(signal, true);
+}
+
+/**
+ * This error is thrown when an asynchronous operation has been aborted.
+ * Check for this error by testing the `name` that the name property of the
+ * error matches `"AbortError"`.
+ *
+ * @example
+ * const controller = new AbortController();
+ * controller.abort();
+ * try {
+ * doAsyncWork(controller.signal)
+ * } catch (e) {
+ * if (e.name === 'AbortError') {
+ * // handle abort error here.
+ * }
+ * }
+ */
+var AbortError = /** @class */ (function (_super) {
+ tslib.__extends(AbortError, _super);
+ function AbortError(message) {
+ var _this = _super.call(this, message) || this;
+ _this.name = "AbortError";
+ return _this;
}
- /**
- * Inserts an UInt16BE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertUInt16BE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeUInt16BE, 2, value, offset);
+ return AbortError;
+}(Error));
+/**
+ * An AbortController provides an AbortSignal and the associated controls to signal
+ * that an asynchronous operation should be aborted.
+ *
+ * @example
+ * // Abort an operation when another event fires
+ * const controller = new AbortController();
+ * const signal = controller.signal;
+ * doAsyncWork(signal);
+ * button.addEventListener('click', () => controller.abort());
+ *
+ * @example
+ * // Share aborter cross multiple operations in 30s
+ * // Upload the same data to 2 different data centers at the same time,
+ * // abort another when any of them is finished
+ * const controller = AbortController.withTimeout(30 * 1000);
+ * doAsyncWork(controller.signal).then(controller.abort);
+ * doAsyncWork(controller.signal).then(controller.abort);
+ *
+ * @example
+ * // Cascaded aborting
+ * // All operations can't take more than 30 seconds
+ * const aborter = Aborter.timeout(30 * 1000);
+ *
+ * // Following 2 operations can't take more than 25 seconds
+ * await doAsyncWork(aborter.withTimeout(25 * 1000));
+ * await doAsyncWork(aborter.withTimeout(25 * 1000));
+ *
+ * @export
+ * @class AbortController
+ * @implements {AbortSignalLike}
+ */
+var AbortController = /** @class */ (function () {
+ function AbortController(parentSignals) {
+ var _this = this;
+ this._signal = new AbortSignal();
+ if (!parentSignals) {
+ return;
+ }
+ // coerce parentSignals into an array
+ if (!Array.isArray(parentSignals)) {
+ parentSignals = arguments;
+ }
+ for (var _i = 0, parentSignals_1 = parentSignals; _i < parentSignals_1.length; _i++) {
+ var parentSignal = parentSignals_1[_i];
+ // if the parent signal has already had abort() called,
+ // then call abort on this signal as well.
+ if (parentSignal.aborted) {
+ this.abort();
+ }
+ else {
+ // when the parent signal aborts, this signal should as well.
+ parentSignal.addEventListener("abort", function () {
+ _this.abort();
+ });
+ }
+ }
}
+ Object.defineProperty(AbortController.prototype, "signal", {
+ /**
+ * The AbortSignal associated with this controller that will signal aborted
+ * when the abort method is called on this controller.
+ *
+ * @readonly
+ * @type {AbortSignal}
+ * @memberof AbortController
+ */
+ get: function () {
+ return this._signal;
+ },
+ enumerable: true,
+ configurable: true
+ });
/**
- * Writes an UInt16LE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
+ * Signal that any operations passed this controller's associated abort signal
+ * to cancel any remaining work and throw an `AbortError`.
*
- * @return this
+ * @memberof AbortController
*/
- writeUInt16LE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeUInt16LE, 2, value, offset);
- }
+ AbortController.prototype.abort = function () {
+ abortSignal(this._signal);
+ };
/**
- * Inserts an UInt16LE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
+ * Creates a new AbortSignal instance that will abort after the provided ms.
*
- * @return this
+ * @static
+ * @params {number} ms Elapsed time in milliseconds to trigger an abort.
+ * @returns {AbortSignal}
*/
- insertUInt16LE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeUInt16LE, 2, value, offset);
- }
- /**
- * Writes an UInt32BE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeUInt32BE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeUInt32BE, 4, value, offset);
- }
- /**
- * Inserts an UInt32BE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertUInt32BE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeUInt32BE, 4, value, offset);
- }
- /**
- * Writes an UInt32LE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeUInt32LE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeUInt32LE, 4, value, offset);
- }
- /**
- * Inserts an UInt32LE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertUInt32LE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeUInt32LE, 4, value, offset);
- }
- /**
- * Writes a BigUInt64BE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeBigUInt64BE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigUInt64BE');
- return this._writeNumberValue(Buffer.prototype.writeBigUInt64BE, 8, value, offset);
- }
- /**
- * Inserts a BigUInt64BE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertBigUInt64BE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigUInt64BE');
- return this._insertNumberValue(Buffer.prototype.writeBigUInt64BE, 8, value, offset);
- }
- /**
- * Writes a BigUInt64LE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeBigUInt64LE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigUInt64LE');
- return this._writeNumberValue(Buffer.prototype.writeBigUInt64LE, 8, value, offset);
- }
- /**
- * Inserts a BigUInt64LE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertBigUInt64LE(value, offset) {
- utils_1.bigIntAndBufferInt64Check('writeBigUInt64LE');
- return this._insertNumberValue(Buffer.prototype.writeBigUInt64LE, 8, value, offset);
- }
- // Floating Point
- /**
- * Reads an FloatBE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readFloatBE(offset) {
- return this._readNumberValue(Buffer.prototype.readFloatBE, 4, offset);
- }
- /**
- * Reads an FloatLE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readFloatLE(offset) {
- return this._readNumberValue(Buffer.prototype.readFloatLE, 4, offset);
- }
- /**
- * Writes a FloatBE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeFloatBE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeFloatBE, 4, value, offset);
- }
- /**
- * Inserts a FloatBE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertFloatBE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeFloatBE, 4, value, offset);
- }
- /**
- * Writes a FloatLE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeFloatLE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeFloatLE, 4, value, offset);
- }
- /**
- * Inserts a FloatLE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertFloatLE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeFloatLE, 4, value, offset);
- }
- // Double Floating Point
- /**
- * Reads an DoublEBE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readDoubleBE(offset) {
- return this._readNumberValue(Buffer.prototype.readDoubleBE, 8, offset);
- }
- /**
- * Reads an DoubleLE value from the current read position or an optionally provided offset.
- *
- * @param offset { Number } The offset to read data from (optional)
- * @return { Number }
- */
- readDoubleLE(offset) {
- return this._readNumberValue(Buffer.prototype.readDoubleLE, 8, offset);
- }
- /**
- * Writes a DoubleBE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeDoubleBE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeDoubleBE, 8, value, offset);
- }
- /**
- * Inserts a DoubleBE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertDoubleBE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeDoubleBE, 8, value, offset);
- }
- /**
- * Writes a DoubleLE value to the current write position (or at optional offset).
- *
- * @param value { Number } The value to write.
- * @param offset { Number } The offset to write the value at.
- *
- * @return this
- */
- writeDoubleLE(value, offset) {
- return this._writeNumberValue(Buffer.prototype.writeDoubleLE, 8, value, offset);
- }
- /**
- * Inserts a DoubleLE value at the given offset value.
- *
- * @param value { Number } The value to insert.
- * @param offset { Number } The offset to insert the value at.
- *
- * @return this
- */
- insertDoubleLE(value, offset) {
- return this._insertNumberValue(Buffer.prototype.writeDoubleLE, 8, value, offset);
- }
- // Strings
- /**
- * Reads a String from the current read position.
- *
- * @param arg1 { Number | String } The number of bytes to read as a String, or the BufferEncoding to use for
- * the string (Defaults to instance level encoding).
- * @param encoding { String } The BufferEncoding to use for the string (Defaults to instance level encoding).
- *
- * @return { String }
- */
- readString(arg1, encoding) {
- let lengthVal;
- // Length provided
- if (typeof arg1 === 'number') {
- utils_1.checkLengthValue(arg1);
- lengthVal = Math.min(arg1, this.length - this._readOffset);
- }
- else {
- encoding = arg1;
- lengthVal = this.length - this._readOffset;
- }
- // Check encoding
- if (typeof encoding !== 'undefined') {
- utils_1.checkEncoding(encoding);
+ AbortController.timeout = function (ms) {
+ var signal = new AbortSignal();
+ var timer = setTimeout(abortSignal, ms, signal);
+ // Prevent the active Timer from keeping the Node.js event loop active.
+ if (typeof timer.unref === "function") {
+ timer.unref();
}
- const value = this._buff.slice(this._readOffset, this._readOffset + lengthVal).toString(encoding || this._encoding);
- this._readOffset += lengthVal;
- return value;
+ return signal;
+ };
+ return AbortController;
+}());
+
+exports.AbortController = AbortController;
+exports.AbortError = AbortError;
+exports.AbortSignal = AbortSignal;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+/* 107 */
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=Observation.js.map
+
+/***/ }),
+/* 108 */,
+/* 109 */,
+/* 110 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+// Do a two-pass walk, first to get the list of packages that need to be
+// bundled, then again to get the actual files and folders.
+// Keep a cache of node_modules content and package.json data, so that the
+// second walk doesn't have to re-do all the same work.
+
+const bundleWalk = __webpack_require__(650)
+const BundleWalker = bundleWalk.BundleWalker
+const BundleWalkerSync = bundleWalk.BundleWalkerSync
+
+const ignoreWalk = __webpack_require__(418)
+const IgnoreWalker = ignoreWalk.Walker
+const IgnoreWalkerSync = ignoreWalk.WalkerSync
+
+const rootBuiltinRules = Symbol('root-builtin-rules')
+const packageNecessaryRules = Symbol('package-necessary-rules')
+const path = __webpack_require__(622)
+
+const normalizePackageBin = __webpack_require__(787)
+
+const defaultRules = [
+ '.npmignore',
+ '.gitignore',
+ '**/.git',
+ '**/.svn',
+ '**/.hg',
+ '**/CVS',
+ '**/.git/**',
+ '**/.svn/**',
+ '**/.hg/**',
+ '**/CVS/**',
+ '/.lock-wscript',
+ '/.wafpickle-*',
+ '/build/config.gypi',
+ 'npm-debug.log',
+ '**/.npmrc',
+ '.*.swp',
+ '.DS_Store',
+ '**/.DS_Store/**',
+ '._*',
+ '**/._*/**',
+ '*.orig',
+ '/package-lock.json',
+ '/yarn.lock',
+ 'archived-packages/**',
+ 'core',
+ '!core/',
+ '!**/core/',
+ '*.core',
+ '*.vgcore',
+ 'vgcore.*',
+ 'core.+([0-9])',
+]
+
+// There may be others, but :?|<> are handled by node-tar
+const nameIsBadForWindows = file => /\*/.test(file)
+
+// a decorator that applies our custom rules to an ignore walker
+const npmWalker = Class => class Walker extends Class {
+ constructor (opt) {
+ opt = opt || {}
+
+ // the order in which rules are applied.
+ opt.ignoreFiles = [
+ rootBuiltinRules,
+ 'package.json',
+ '.npmignore',
+ '.gitignore',
+ packageNecessaryRules
+ ]
+
+ opt.includeEmpty = false
+ opt.path = opt.path || process.cwd()
+ const dirName = path.basename(opt.path)
+ const parentName = path.basename(path.dirname(opt.path))
+ opt.follow =
+ dirName === 'node_modules' ||
+ (parentName === 'node_modules' && /^@/.test(dirName))
+ super(opt)
+
+ // ignore a bunch of things by default at the root level.
+ // also ignore anything in node_modules, except bundled dependencies
+ if (!this.parent) {
+ this.bundled = opt.bundled || []
+ this.bundledScopes = Array.from(new Set(
+ this.bundled.filter(f => /^@/.test(f))
+ .map(f => f.split('/')[0])))
+ const rules = defaultRules.join('\n') + '\n'
+ this.packageJsonCache = opt.packageJsonCache || new Map()
+ super.onReadIgnoreFile(rootBuiltinRules, rules, _=>_)
+ } else {
+ this.bundled = []
+ this.bundledScopes = []
+ this.packageJsonCache = this.parent.packageJsonCache
}
- /**
- * Inserts a String
- *
- * @param value { String } The String value to insert.
- * @param offset { Number } The offset to insert the string at.
- * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
- *
- * @return this
- */
- insertString(value, offset, encoding) {
- utils_1.checkOffsetValue(offset);
- return this._handleString(value, true, offset, encoding);
+ }
+
+ onReaddir (entries) {
+ if (!this.parent) {
+ entries = entries.filter(e =>
+ e !== '.git' &&
+ !(e === 'node_modules' && this.bundled.length === 0)
+ )
}
- /**
- * Writes a String
- *
- * @param value { String } The String value to write.
- * @param arg2 { Number | String } The offset to write the string at, or the BufferEncoding to use.
- * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
- *
- * @return this
- */
- writeString(value, arg2, encoding) {
- return this._handleString(value, false, arg2, encoding);
+ return super.onReaddir(entries)
+ }
+
+ filterEntry (entry, partial) {
+ // get the partial path from the root of the walk
+ const p = this.path.substr(this.root.length + 1)
+ const pkgre = /^node_modules\/(@[^\/]+\/?[^\/]+|[^\/]+)(\/.*)?$/
+ const isRoot = !this.parent
+ const pkg = isRoot && pkgre.test(entry) ?
+ entry.replace(pkgre, '$1') : null
+ const rootNM = isRoot && entry === 'node_modules'
+ const rootPJ = isRoot && entry === 'package.json'
+
+ return (
+ // if we're in a bundled package, check with the parent.
+ /^node_modules($|\/)/i.test(p) ? this.parent.filterEntry(
+ this.basename + '/' + entry, partial)
+
+ // if package is bundled, all files included
+ // also include @scope dirs for bundled scoped deps
+ // they'll be ignored if no files end up in them.
+ // However, this only matters if we're in the root.
+ // node_modules folders elsewhere, like lib/node_modules,
+ // should be included normally unless ignored.
+ : pkg ? -1 !== this.bundled.indexOf(pkg) ||
+ -1 !== this.bundledScopes.indexOf(pkg)
+
+ // only walk top node_modules if we want to bundle something
+ : rootNM ? !!this.bundled.length
+
+ // always include package.json at the root.
+ : rootPJ ? true
+
+ // otherwise, follow ignore-walk's logic
+ : super.filterEntry(entry, partial)
+ )
+ }
+
+ filterEntries () {
+ if (this.ignoreRules['package.json'])
+ this.ignoreRules['.gitignore'] = this.ignoreRules['.npmignore'] = null
+ else if (this.ignoreRules['.npmignore'])
+ this.ignoreRules['.gitignore'] = null
+ this.filterEntries = super.filterEntries
+ super.filterEntries()
+ }
+
+ addIgnoreFile (file, then) {
+ const ig = path.resolve(this.path, file)
+ if (this.packageJsonCache.has(ig))
+ this.onPackageJson(ig, this.packageJsonCache.get(ig), then)
+ else
+ super.addIgnoreFile(file, then)
+ }
+
+ onPackageJson (ig, pkg, then) {
+ this.packageJsonCache.set(ig, pkg)
+
+ // if there's a bin, browser or main, make sure we don't ignore it
+ // also, don't ignore the package.json itself!
+ //
+ // Weird side-effect of this: a readme (etc) file will be included
+ // if it exists anywhere within a folder with a package.json file.
+ // The original intent was only to include these files in the root,
+ // but now users in the wild are dependent on that behavior for
+ // localized documentation and other use cases. Adding a `/` to
+ // these rules, while tempting and arguably more "correct", is a
+ // breaking change.
+ const rules = [
+ pkg.browser ? '!' + pkg.browser : '',
+ pkg.main ? '!' + pkg.main : '',
+ '!package.json',
+ '!npm-shrinkwrap.json',
+ '!@(readme|copying|license|licence|notice|changes|changelog|history){,.*[^~$]}'
+ ]
+ if (pkg.bin) {
+ // always an object, because normalized already
+ for (const key in pkg.bin)
+ rules.push('!' + pkg.bin[key])
}
- /**
- * Reads a null-terminated String from the current read position.
- *
- * @param encoding { String } The BufferEncoding to use for the string (Defaults to instance level encoding).
- *
- * @return { String }
- */
- readStringNT(encoding) {
- if (typeof encoding !== 'undefined') {
- utils_1.checkEncoding(encoding);
- }
- // Set null character position to the end SmartBuffer instance.
- let nullPos = this.length;
- // Find next null character (if one is not found, default from above is used)
- for (let i = this._readOffset; i < this.length; i++) {
- if (this._buff[i] === 0x00) {
- nullPos = i;
- break;
- }
- }
- // Read string value
- const value = this._buff.slice(this._readOffset, nullPos);
- // Increment internal Buffer read offset
- this._readOffset = nullPos + 1;
- return value.toString(encoding || this._encoding);
- }
- /**
- * Inserts a null-terminated String.
- *
- * @param value { String } The String value to write.
- * @param arg2 { Number | String } The offset to write the string to, or the BufferEncoding to use.
- * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
- *
- * @return this
- */
- insertStringNT(value, offset, encoding) {
- utils_1.checkOffsetValue(offset);
- // Write Values
- this.insertString(value, offset, encoding);
- this.insertUInt8(0x00, offset + value.length);
- return this;
- }
- /**
- * Writes a null-terminated String.
- *
- * @param value { String } The String value to write.
- * @param arg2 { Number | String } The offset to write the string to, or the BufferEncoding to use.
- * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
- *
- * @return this
- */
- writeStringNT(value, arg2, encoding) {
- // Write Values
- this.writeString(value, arg2, encoding);
- this.writeUInt8(0x00, typeof arg2 === 'number' ? arg2 + value.length : this.writeOffset);
- return this;
+
+ const data = rules.filter(f => f).join('\n') + '\n'
+ super.onReadIgnoreFile(packageNecessaryRules, data, _=>_)
+
+ if (Array.isArray(pkg.files))
+ super.onReadIgnoreFile('package.json', '*\n' + pkg.files.map(
+ f => '!' + f + '\n!' + f.replace(/\/+$/, '') + '/**'
+ ).join('\n') + '\n', then)
+ else
+ then()
+ }
+
+ // override parent stat function to completely skip any filenames
+ // that will break windows entirely.
+ // XXX(isaacs) Next major version should make this an error instead.
+ stat (entry, file, dir, then) {
+ if (nameIsBadForWindows(entry))
+ then()
+ else
+ super.stat(entry, file, dir, then)
+ }
+
+ // override parent onstat function to nix all symlinks
+ onstat (st, entry, file, dir, then) {
+ if (st.isSymbolicLink())
+ then()
+ else
+ super.onstat(st, entry, file, dir, then)
+ }
+
+ onReadIgnoreFile (file, data, then) {
+ if (file === 'package.json')
+ try {
+ const ig = path.resolve(this.path, file)
+ this.onPackageJson(ig, normalizePackageBin(JSON.parse(data)), then)
+ } catch (er) {
+ // ignore package.json files that are not json
+ then()
+ }
+ else
+ super.onReadIgnoreFile(file, data, then)
+ }
+
+ sort (a, b) {
+ return sort(a, b)
+ }
+}
+
+class Walker extends npmWalker(IgnoreWalker) {
+ walker (entry, then) {
+ new Walker(this.walkerOpt(entry)).on('done', then).start()
+ }
+}
+
+class WalkerSync extends npmWalker(IgnoreWalkerSync) {
+ walker (entry, then) {
+ new WalkerSync(this.walkerOpt(entry)).start()
+ then()
+ }
+}
+
+const walk = (options, callback) => {
+ options = options || {}
+ const p = new Promise((resolve, reject) => {
+ const bw = new BundleWalker(options)
+ bw.on('done', bundled => {
+ options.bundled = bundled
+ options.packageJsonCache = bw.packageJsonCache
+ new Walker(options).on('done', resolve).on('error', reject).start()
+ })
+ bw.start()
+ })
+ return callback ? p.then(res => callback(null, res), callback) : p
+}
+
+const walkSync = options => {
+ options = options || {}
+ const bw = new BundleWalkerSync(options).start()
+ options.bundled = bw.result
+ options.packageJsonCache = bw.packageJsonCache
+ const walker = new WalkerSync(options)
+ walker.start()
+ return walker.result
+}
+
+// optimize for compressibility
+// extname, then basename, then locale alphabetically
+// https://twitter.com/isntitvacant/status/1131094910923231232
+const sort = (a, b) => {
+ const exta = path.extname(a).toLowerCase()
+ const extb = path.extname(b).toLowerCase()
+ const basea = path.basename(a).toLowerCase()
+ const baseb = path.basename(b).toLowerCase()
+
+ return exta.localeCompare(extb) ||
+ basea.localeCompare(baseb) ||
+ a.localeCompare(b)
+}
+
+
+module.exports = walk
+walk.sync = walkSync
+walk.Walker = Walker
+walk.WalkerSync = WalkerSync
+
+
+/***/ }),
+/* 111 */
+/***/ (function(module) {
+
+module.exports = bindActor
+function bindActor () {
+ var args =
+ Array.prototype.slice.call
+ (arguments) // jswtf.
+ , obj = null
+ , fn
+ if (typeof args[0] === "object") {
+ obj = args.shift()
+ fn = args.shift()
+ if (typeof fn === "string")
+ fn = obj[ fn ]
+ } else fn = args.shift()
+ return function (cb) {
+ fn.apply(obj, args.concat(cb)) }
+}
+
+
+/***/ }),
+/* 112 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+module.exports = __webpack_require__(146);
+module.exports.HttpsAgent = __webpack_require__(628);
+
+
+/***/ }),
+/* 113 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+module.exports = __webpack_require__(964)
+
+
+/***/ }),
+/* 114 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const http_client_1 = __webpack_require__(22);
+const auth_1 = __webpack_require__(733);
+const crypto = __importStar(__webpack_require__(417));
+const fs = __importStar(__webpack_require__(747));
+const url_1 = __webpack_require__(835);
+const utils = __importStar(__webpack_require__(15));
+const constants_1 = __webpack_require__(931);
+const downloadUtils_1 = __webpack_require__(251);
+const options_1 = __webpack_require__(538);
+const requestUtils_1 = __webpack_require__(899);
+const versionSalt = '1.0';
+function getCacheApiUrl(resource) {
+ // Ideally we just use ACTIONS_CACHE_URL
+ const baseUrl = (process.env['ACTIONS_CACHE_URL'] ||
+ process.env['ACTIONS_RUNTIME_URL'] ||
+ '').replace('pipelines', 'artifactcache');
+ if (!baseUrl) {
+ throw new Error('Cache Service Url not found, unable to restore cache.');
}
- // Buffers
- /**
- * Reads a Buffer from the internal read position.
- *
- * @param length { Number } The length of data to read as a Buffer.
- *
- * @return { Buffer }
- */
- readBuffer(length) {
- if (typeof length !== 'undefined') {
- utils_1.checkLengthValue(length);
+ const url = `${baseUrl}_apis/artifactcache/${resource}`;
+ core.debug(`Resource Url: ${url}`);
+ return url;
+}
+function createAcceptHeader(type, apiVersion) {
+ return `${type};api-version=${apiVersion}`;
+}
+function getRequestOptions() {
+ const requestOptions = {
+ headers: {
+ Accept: createAcceptHeader('application/json', '6.0-preview.1')
}
- const lengthVal = typeof length === 'number' ? length : this.length;
- const endPoint = Math.min(this.length, this._readOffset + lengthVal);
- // Read buffer value
- const value = this._buff.slice(this._readOffset, endPoint);
- // Increment internal Buffer read offset
- this._readOffset = endPoint;
- return value;
- }
- /**
- * Writes a Buffer to the current write position.
- *
- * @param value { Buffer } The Buffer to write.
- * @param offset { Number } The offset to write the Buffer to.
- *
- * @return this
- */
- insertBuffer(value, offset) {
- utils_1.checkOffsetValue(offset);
- return this._handleBuffer(value, true, offset);
- }
- /**
- * Writes a Buffer to the current write position.
- *
- * @param value { Buffer } The Buffer to write.
- * @param offset { Number } The offset to write the Buffer to.
- *
- * @return this
- */
- writeBuffer(value, offset) {
- return this._handleBuffer(value, false, offset);
- }
- /**
- * Reads a null-terminated Buffer from the current read poisiton.
- *
- * @return { Buffer }
- */
- readBufferNT() {
- // Set null character position to the end SmartBuffer instance.
- let nullPos = this.length;
- // Find next null character (if one is not found, default from above is used)
- for (let i = this._readOffset; i < this.length; i++) {
- if (this._buff[i] === 0x00) {
- nullPos = i;
- break;
- }
+ };
+ return requestOptions;
+}
+function createHttpClient() {
+ const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '';
+ const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
+ return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
+}
+function getCacheVersion(paths, compressionMethod) {
+ const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip
+ ? []
+ : [compressionMethod]);
+ // Add salt to cache version to support breaking changes in cache entry
+ components.push(versionSalt);
+ return crypto
+ .createHash('sha256')
+ .update(components.join('|'))
+ .digest('hex');
+}
+exports.getCacheVersion = getCacheVersion;
+function getCacheEntry(keys, paths, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const httpClient = createHttpClient();
+ const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
+ const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
+ const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
+ if (response.statusCode === 204) {
+ return null;
}
- // Read value
- const value = this._buff.slice(this._readOffset, nullPos);
- // Increment internal Buffer read offset
- this._readOffset = nullPos + 1;
- return value;
- }
- /**
- * Inserts a null-terminated Buffer.
- *
- * @param value { Buffer } The Buffer to write.
- * @param offset { Number } The offset to write the Buffer to.
- *
- * @return this
- */
- insertBufferNT(value, offset) {
- utils_1.checkOffsetValue(offset);
- // Write Values
- this.insertBuffer(value, offset);
- this.insertUInt8(0x00, offset + value.length);
- return this;
- }
- /**
- * Writes a null-terminated Buffer.
- *
- * @param value { Buffer } The Buffer to write.
- * @param offset { Number } The offset to write the Buffer to.
- *
- * @return this
- */
- writeBufferNT(value, offset) {
- // Checks for valid numberic value;
- if (typeof offset !== 'undefined') {
- utils_1.checkOffsetValue(offset);
+ if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) {
+ throw new Error(`Cache service responded with ${response.statusCode}`);
}
- // Write Values
- this.writeBuffer(value, offset);
- this.writeUInt8(0x00, typeof offset === 'number' ? offset + value.length : this._writeOffset);
- return this;
- }
- /**
- * Clears the SmartBuffer instance to its original empty state.
- */
- clear() {
- this._writeOffset = 0;
- this._readOffset = 0;
- this.length = 0;
- return this;
- }
- /**
- * Gets the remaining data left to be read from the SmartBuffer instance.
- *
- * @return { Number }
- */
- remaining() {
- return this.length - this._readOffset;
- }
- /**
- * Gets the current read offset value of the SmartBuffer instance.
- *
- * @return { Number }
- */
- get readOffset() {
- return this._readOffset;
- }
- /**
- * Sets the read offset value of the SmartBuffer instance.
- *
- * @param offset { Number } - The offset value to set.
- */
- set readOffset(offset) {
- utils_1.checkOffsetValue(offset);
- // Check for bounds.
- utils_1.checkTargetOffset(offset, this);
- this._readOffset = offset;
- }
- /**
- * Gets the current write offset value of the SmartBuffer instance.
- *
- * @return { Number }
- */
- get writeOffset() {
- return this._writeOffset;
- }
- /**
- * Sets the write offset value of the SmartBuffer instance.
- *
- * @param offset { Number } - The offset value to set.
- */
- set writeOffset(offset) {
- utils_1.checkOffsetValue(offset);
- // Check for bounds.
- utils_1.checkTargetOffset(offset, this);
- this._writeOffset = offset;
- }
- /**
- * Gets the currently set string encoding of the SmartBuffer instance.
- *
- * @return { BufferEncoding } The string Buffer encoding currently set.
- */
- get encoding() {
- return this._encoding;
- }
- /**
- * Sets the string encoding of the SmartBuffer instance.
- *
- * @param encoding { BufferEncoding } The string Buffer encoding to set.
- */
- set encoding(encoding) {
- utils_1.checkEncoding(encoding);
- this._encoding = encoding;
- }
- /**
- * Gets the underlying internal Buffer. (This includes unmanaged data in the Buffer)
- *
- * @return { Buffer } The Buffer value.
- */
- get internalBuffer() {
- return this._buff;
- }
- /**
- * Gets the value of the internal managed Buffer (Includes managed data only)
- *
- * @param { Buffer }
- */
- toBuffer() {
- return this._buff.slice(0, this.length);
- }
- /**
- * Gets the String value of the internal managed Buffer
- *
- * @param encoding { String } The BufferEncoding to display the Buffer as (defaults to instance level encoding).
- */
- toString(encoding) {
- const encodingVal = typeof encoding === 'string' ? encoding : this._encoding;
- // Check for invalid encoding.
- utils_1.checkEncoding(encodingVal);
- return this._buff.toString(encodingVal, 0, this.length);
- }
- /**
- * Destroys the SmartBuffer instance.
- */
- destroy() {
- this.clear();
- return this;
- }
- /**
- * Handles inserting and writing strings.
- *
- * @param value { String } The String value to insert.
- * @param isInsert { Boolean } True if inserting a string, false if writing.
- * @param arg2 { Number | String } The offset to insert the string at, or the BufferEncoding to use.
- * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
- */
- _handleString(value, isInsert, arg3, encoding) {
- let offsetVal = this._writeOffset;
- let encodingVal = this._encoding;
- // Check for offset
- if (typeof arg3 === 'number') {
- offsetVal = arg3;
- // Check for encoding
+ const cacheResult = response.result;
+ const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
+ if (!cacheDownloadUrl) {
+ throw new Error('Cache not found.');
}
- else if (typeof arg3 === 'string') {
- utils_1.checkEncoding(arg3);
- encodingVal = arg3;
- }
- // Check for encoding (third param)
- if (typeof encoding === 'string') {
- utils_1.checkEncoding(encoding);
- encodingVal = encoding;
- }
- // Calculate bytelength of string.
- const byteLength = Buffer.byteLength(value, encodingVal);
- // Ensure there is enough internal Buffer capacity.
- if (isInsert) {
- this.ensureInsertable(byteLength, offsetVal);
- }
- else {
- this._ensureWriteable(byteLength, offsetVal);
- }
- // Write value
- this._buff.write(value, offsetVal, byteLength, encodingVal);
- // Increment internal Buffer write offset;
- if (isInsert) {
- this._writeOffset += byteLength;
- }
- else {
- // If an offset was given, check to see if we wrote beyond the current writeOffset.
- if (typeof arg3 === 'number') {
- this._writeOffset = Math.max(this._writeOffset, offsetVal + byteLength);
- }
- else {
- // If no offset was given, we wrote to the end of the SmartBuffer so increment writeOffset.
- this._writeOffset += byteLength;
- }
- }
- return this;
- }
- /**
- * Handles writing or insert of a Buffer.
- *
- * @param value { Buffer } The Buffer to write.
- * @param offset { Number } The offset to write the Buffer to.
- */
- _handleBuffer(value, isInsert, offset) {
- const offsetVal = typeof offset === 'number' ? offset : this._writeOffset;
- // Ensure there is enough internal Buffer capacity.
- if (isInsert) {
- this.ensureInsertable(value.length, offsetVal);
- }
- else {
- this._ensureWriteable(value.length, offsetVal);
- }
- // Write buffer value
- value.copy(this._buff, offsetVal);
- // Increment internal Buffer write offset;
- if (isInsert) {
- this._writeOffset += value.length;
- }
- else {
- // If an offset was given, check to see if we wrote beyond the current writeOffset.
- if (typeof offset === 'number') {
- this._writeOffset = Math.max(this._writeOffset, offsetVal + value.length);
- }
- else {
- // If no offset was given, we wrote to the end of the SmartBuffer so increment writeOffset.
- this._writeOffset += value.length;
- }
- }
- return this;
- }
- /**
- * Ensures that the internal Buffer is large enough to read data.
- *
- * @param length { Number } The length of the data that needs to be read.
- * @param offset { Number } The offset of the data that needs to be read.
- */
- ensureReadable(length, offset) {
- // Offset value defaults to managed read offset.
- let offsetVal = this._readOffset;
- // If an offset was provided, use it.
- if (typeof offset !== 'undefined') {
- // Checks for valid numberic value;
- utils_1.checkOffsetValue(offset);
- // Overide with custom offset.
- offsetVal = offset;
- }
- // Checks if offset is below zero, or the offset+length offset is beyond the total length of the managed data.
- if (offsetVal < 0 || offsetVal + length > this.length) {
- throw new Error(utils_1.ERRORS.INVALID_READ_BEYOND_BOUNDS);
- }
- }
- /**
- * Ensures that the internal Buffer is large enough to insert data.
- *
- * @param dataLength { Number } The length of the data that needs to be written.
- * @param offset { Number } The offset of the data to be written.
- */
- ensureInsertable(dataLength, offset) {
- // Checks for valid numberic value;
- utils_1.checkOffsetValue(offset);
- // Ensure there is enough internal Buffer capacity.
- this._ensureCapacity(this.length + dataLength);
- // If an offset was provided and its not the very end of the buffer, copy data into appropriate location in regards to the offset.
- if (offset < this.length) {
- this._buff.copy(this._buff, offset + dataLength, offset, this._buff.length);
- }
- // Adjust tracked smart buffer length
- if (offset + dataLength > this.length) {
- this.length = offset + dataLength;
+ core.setSecret(cacheDownloadUrl);
+ core.debug(`Cache Result:`);
+ core.debug(JSON.stringify(cacheResult));
+ return cacheResult;
+ });
+}
+exports.getCacheEntry = getCacheEntry;
+function downloadCache(archiveLocation, archivePath, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const archiveUrl = new url_1.URL(archiveLocation);
+ const downloadOptions = options_1.getDownloadOptions(options);
+ if (downloadOptions.useAzureSdk &&
+ archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
+ // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
+ yield downloadUtils_1.downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions);
}
else {
- this.length += dataLength;
- }
- }
- /**
- * Ensures that the internal Buffer is large enough to write data.
- *
- * @param dataLength { Number } The length of the data that needs to be written.
- * @param offset { Number } The offset of the data to be written (defaults to writeOffset).
- */
- _ensureWriteable(dataLength, offset) {
- const offsetVal = typeof offset === 'number' ? offset : this._writeOffset;
- // Ensure enough capacity to write data.
- this._ensureCapacity(offsetVal + dataLength);
- // Adjust SmartBuffer length (if offset + length is larger than managed length, adjust length)
- if (offsetVal + dataLength > this.length) {
- this.length = offsetVal + dataLength;
- }
- }
- /**
- * Ensures that the internal Buffer is large enough to write at least the given amount of data.
- *
- * @param minLength { Number } The minimum length of the data needs to be written.
- */
- _ensureCapacity(minLength) {
- const oldLength = this._buff.length;
- if (minLength > oldLength) {
- let data = this._buff;
- let newLength = (oldLength * 3) / 2 + 1;
- if (newLength < minLength) {
- newLength = minLength;
- }
- this._buff = Buffer.allocUnsafe(newLength);
- data.copy(this._buff, 0, 0, oldLength);
+ // Otherwise, download using the Actions http-client.
+ yield downloadUtils_1.downloadCacheHttpClient(archiveLocation, archivePath);
}
- }
- /**
- * Reads a numeric number value using the provided function.
- *
- * @typeparam T { number | bigint } The type of the value to be read
- *
- * @param func { Function(offset: number) => number } The function to read data on the internal Buffer with.
- * @param byteSize { Number } The number of bytes read.
- * @param offset { Number } The offset to read from (optional). When this is not provided, the managed readOffset is used instead.
- *
- * @returns { T } the number value
- */
- _readNumberValue(func, byteSize, offset) {
- this.ensureReadable(byteSize, offset);
- // Call Buffer.readXXXX();
- const value = func.call(this._buff, typeof offset === 'number' ? offset : this._readOffset);
- // Adjust internal read offset if an optional read offset was not provided.
- if (typeof offset === 'undefined') {
- this._readOffset += byteSize;
+ });
+}
+exports.downloadCache = downloadCache;
+// Reserve Cache
+function reserveCache(key, paths, options) {
+ var _a, _b;
+ return __awaiter(this, void 0, void 0, function* () {
+ const httpClient = createHttpClient();
+ const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod);
+ const reserveCacheRequest = {
+ key,
+ version
+ };
+ const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () {
+ return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest);
+ }));
+ return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1;
+ });
+}
+exports.reserveCache = reserveCache;
+function getContentRange(start, end) {
+ // Format: `bytes start-end/filesize
+ // start and end are inclusive
+ // filesize can be *
+ // For a 200 byte chunk starting at byte 0:
+ // Content-Range: bytes 0-199/*
+ return `bytes ${start}-${end}/*`;
+}
+function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
+ return __awaiter(this, void 0, void 0, function* () {
+ core.debug(`Uploading chunk of size ${end -
+ start +
+ 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+ const additionalHeaders = {
+ 'Content-Type': 'application/octet-stream',
+ 'Content-Range': getContentRange(start, end)
+ };
+ const uploadChunkResponse = yield requestUtils_1.retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () {
+ return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders);
+ }));
+ if (!requestUtils_1.isSuccessStatusCode(uploadChunkResponse.message.statusCode)) {
+ throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`);
}
- return value;
- }
- /**
- * Inserts a numeric number value based on the given offset and value.
- *
- * @typeparam T { number | bigint } The type of the value to be written
- *
- * @param func { Function(offset: T, offset?) => number} The function to write data on the internal Buffer with.
- * @param byteSize { Number } The number of bytes written.
- * @param value { T } The number value to write.
- * @param offset { Number } the offset to write the number at (REQUIRED).
- *
- * @returns SmartBuffer this buffer
- */
- _insertNumberValue(func, byteSize, value, offset) {
- // Check for invalid offset values.
- utils_1.checkOffsetValue(offset);
- // Ensure there is enough internal Buffer capacity. (raw offset is passed)
- this.ensureInsertable(byteSize, offset);
- // Call buffer.writeXXXX();
- func.call(this._buff, value, offset);
- // Adjusts internally managed write offset.
- this._writeOffset += byteSize;
- return this;
- }
- /**
- * Writes a numeric number value based on the given offset and value.
- *
- * @typeparam T { number | bigint } The type of the value to be written
- *
- * @param func { Function(offset: T, offset?) => number} The function to write data on the internal Buffer with.
- * @param byteSize { Number } The number of bytes written.
- * @param value { T } The number value to write.
- * @param offset { Number } the offset to write the number at (REQUIRED).
- *
- * @returns SmartBuffer this buffer
- */
- _writeNumberValue(func, byteSize, value, offset) {
- // If an offset was provided, validate it.
- if (typeof offset === 'number') {
- // Check if we're writing beyond the bounds of the managed data.
- if (offset < 0) {
- throw new Error(utils_1.ERRORS.INVALID_WRITE_BEYOND_BOUNDS);
- }
- utils_1.checkOffsetValue(offset);
+ });
+}
+function uploadFile(httpClient, cacheId, archivePath, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // Upload Chunks
+ const fileSize = fs.statSync(archivePath).size;
+ const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
+ const fd = fs.openSync(archivePath, 'r');
+ const uploadOptions = options_1.getUploadOptions(options);
+ const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency);
+ const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize);
+ const parallelUploads = [...new Array(concurrency).keys()];
+ core.debug('Awaiting all uploads');
+ let offset = 0;
+ try {
+ yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
+ while (offset < fileSize) {
+ const chunkSize = Math.min(fileSize - offset, maxChunkSize);
+ const start = offset;
+ const end = offset + chunkSize - 1;
+ offset += maxChunkSize;
+ yield uploadChunk(httpClient, resourceUrl, () => fs
+ .createReadStream(archivePath, {
+ fd,
+ start,
+ end,
+ autoClose: false
+ })
+ .on('error', error => {
+ throw new Error(`Cache upload failed because file read failed with ${error.message}`);
+ }), start, end);
+ }
+ })));
}
- // Default to writeOffset if no offset value was given.
- const offsetVal = typeof offset === 'number' ? offset : this._writeOffset;
- // Ensure there is enough internal Buffer capacity. (raw offset is passed)
- this._ensureWriteable(byteSize, offsetVal);
- func.call(this._buff, value, offsetVal);
- // If an offset was given, check to see if we wrote beyond the current writeOffset.
- if (typeof offset === 'number') {
- this._writeOffset = Math.max(this._writeOffset, offsetVal + byteSize);
+ finally {
+ fs.closeSync(fd);
}
- else {
- // If no numeric offset was given, we wrote to the end of the SmartBuffer so increment writeOffset.
- this._writeOffset += byteSize;
+ return;
+ });
+}
+function commitCache(httpClient, cacheId, filesize) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const commitCacheRequest = { size: filesize };
+ return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () {
+ return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
+ }));
+ });
+}
+function saveCache(cacheId, archivePath, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const httpClient = createHttpClient();
+ core.debug('Upload cache');
+ yield uploadFile(httpClient, cacheId, archivePath, options);
+ // Commit Cache
+ core.debug('Commiting cache');
+ const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
+ const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
+ if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
+ throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
}
- return this;
- }
+ core.info('Cache saved successfully');
+ });
}
-exports.SmartBuffer = SmartBuffer;
-//# sourceMappingURL=smartbuffer.js.map
+exports.saveCache = saveCache;
+//# sourceMappingURL=cacheHttpClient.js.map
/***/ }),
-/* 119 */,
-/* 120 */
-/***/ (function(module) {
+/* 115 */,
+/* 116 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-function createError(msg, code, props) {
- var err = msg instanceof Error ? msg : new Error(msg);
- var key;
-
- if (typeof code === 'object') {
- props = code;
- } else if (code != null) {
- err.code = code;
- }
+const figgyPudding = __webpack_require__(965)
+const getStream = __webpack_require__(145)
+const npa = __webpack_require__(482)
+const npmFetch = __webpack_require__(789)
+const {PassThrough} = __webpack_require__(794)
+const validate = __webpack_require__(772)
- if (props) {
- for (key in props) {
- err[key] = props[key];
- }
- }
+const AccessConfig = figgyPudding({
+ Promise: {default: () => Promise}
+})
- return err;
+const eu = encodeURIComponent
+const npar = spec => {
+ spec = npa(spec)
+ if (!spec.registry) {
+ throw new Error('`spec` must be a registry spec')
+ }
+ return spec
}
-module.exports = createError;
-
+const cmd = module.exports = {}
-/***/ }),
-/* 121 */,
-/* 122 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+cmd.public = (spec, opts) => setAccess(spec, 'public', opts)
+cmd.restricted = (spec, opts) => setAccess(spec, 'restricted', opts)
+function setAccess (spec, access, opts) {
+ opts = AccessConfig(opts)
+ return pwrap(opts, () => {
+ spec = npar(spec)
+ validate('OSO', [spec, access, opts])
+ const uri = `/-/package/${eu(spec.name)}/access`
+ return npmFetch(uri, opts.concat({
+ method: 'POST',
+ body: {access},
+ spec
+ }))
+ }).then(res => res.body.resume() && true)
+}
-"use strict";
+cmd.grant = (spec, entity, permissions, opts) => {
+ opts = AccessConfig(opts)
+ return pwrap(opts, () => {
+ spec = npar(spec)
+ const {scope, team} = splitEntity(entity)
+ validate('OSSSO', [spec, scope, team, permissions, opts])
+ if (permissions !== 'read-write' && permissions !== 'read-only') {
+ throw new Error('`permissions` must be `read-write` or `read-only`. Got `' + permissions + '` instead')
+ }
+ const uri = `/-/team/${eu(scope)}/${eu(team)}/package`
+ return npmFetch(uri, opts.concat({
+ method: 'PUT',
+ body: {package: spec.name, permissions},
+ scope,
+ spec,
+ ignoreBody: true
+ }))
+ }).then(() => true)
+}
+cmd.revoke = (spec, entity, opts) => {
+ opts = AccessConfig(opts)
+ return pwrap(opts, () => {
+ spec = npar(spec)
+ const {scope, team} = splitEntity(entity)
+ validate('OSSO', [spec, scope, team, opts])
+ const uri = `/-/team/${eu(scope)}/${eu(team)}/package`
+ return npmFetch(uri, opts.concat({
+ method: 'DELETE',
+ body: {package: spec.name},
+ scope,
+ spec,
+ ignoreBody: true
+ }))
+ }).then(() => true)
+}
-class FiggyPudding {
- constructor (specs, opts, providers) {
- this.__specs = specs || {}
- Object.keys(this.__specs).forEach(alias => {
- if (typeof this.__specs[alias] === 'string') {
- const key = this.__specs[alias]
- const realSpec = this.__specs[key]
- if (realSpec) {
- const aliasArr = realSpec.aliases || []
- aliasArr.push(alias, key)
- realSpec.aliases = [...(new Set(aliasArr))]
- this.__specs[alias] = realSpec
- } else {
- throw new Error(`Alias refers to invalid key: ${key} -> ${alias}`)
- }
+cmd.lsPackages = (entity, opts) => {
+ opts = AccessConfig(opts)
+ return pwrap(opts, () => {
+ return getStream.array(
+ cmd.lsPackages.stream(entity, opts)
+ ).then(data => data.reduce((acc, [key, val]) => {
+ if (!acc) {
+ acc = {}
}
- })
- this.__opts = opts || {}
- this.__providers = reverse((providers).filter(
- x => x != null && typeof x === 'object'
- ))
- this.__isFiggyPudding = true
- }
- get (key) {
- return pudGet(this, key, true)
- }
- get [Symbol.toStringTag] () { return 'FiggyPudding' }
- forEach (fn, thisArg = this) {
- for (let [key, value] of this.entries()) {
- fn.call(thisArg, value, key, this)
- }
- }
- toJSON () {
- const obj = {}
- this.forEach((val, key) => {
- obj[key] = val
- })
- return obj
+ acc[key] = val
+ return acc
+ }, null))
+ })
+}
+
+cmd.lsPackages.stream = (entity, opts) => {
+ validate('SO|SZ', [entity, opts])
+ opts = AccessConfig(opts)
+ const {scope, team} = splitEntity(entity)
+ let uri
+ if (team) {
+ uri = `/-/team/${eu(scope)}/${eu(team)}/package`
+ } else {
+ uri = `/-/org/${eu(scope)}/package`
}
- * entries (_matcher) {
- for (let key of Object.keys(this.__specs)) {
- yield [key, this.get(key)]
- }
- const matcher = _matcher || this.__opts.other
- if (matcher) {
- const seen = new Set()
- for (let p of this.__providers) {
- const iter = p.entries ? p.entries(matcher) : entries(p)
- for (let [key, val] of iter) {
- if (matcher(key) && !seen.has(key)) {
- seen.add(key)
- yield [key, val]
- }
- }
+ opts = opts.concat({
+ query: {format: 'cli'},
+ mapJson (value, [key]) {
+ if (value === 'read') {
+ return [key, 'read-only']
+ } else if (value === 'write') {
+ return [key, 'read-write']
+ } else {
+ return [key, value]
}
}
- }
- * [Symbol.iterator] () {
- for (let [key, value] of this.entries()) {
- yield [key, value]
+ })
+ const ret = new PassThrough({objectMode: true})
+ npmFetch.json.stream(uri, '*', opts).on('error', err => {
+ if (err.code === 'E404' && !team) {
+ uri = `/-/user/${eu(scope)}/package`
+ npmFetch.json.stream(uri, '*', opts).on(
+ 'error', err => ret.emit('error', err)
+ ).pipe(ret)
+ } else {
+ ret.emit('error', err)
}
- }
- * keys () {
- for (let [key] of this.entries()) {
- yield key
- }
- }
- * values () {
- for (let [, value] of this.entries()) {
- yield value
- }
- }
- concat (...moreConfig) {
- return new Proxy(new FiggyPudding(
- this.__specs,
- this.__opts,
- reverse(this.__providers).concat(moreConfig)
- ), proxyHandler)
- }
+ }).pipe(ret)
+ return ret
}
-try {
- const util = __webpack_require__(669)
- FiggyPudding.prototype[util.inspect.custom] = function (depth, opts) {
- return (
- this[Symbol.toStringTag] + ' '
- ) + util.inspect(this.toJSON(), opts)
- }
-} catch (e) {}
-function BadKeyError (key) {
- throw Object.assign(new Error(
- `invalid config key requested: ${key}`
- ), {code: 'EBADKEY'})
+cmd.lsCollaborators = (spec, user, opts) => {
+ if (typeof user === 'object' && !opts) {
+ opts = user
+ user = undefined
+ }
+ opts = AccessConfig(opts)
+ return pwrap(opts, () => {
+ return getStream.array(
+ cmd.lsCollaborators.stream(spec, user, opts)
+ ).then(data => data.reduce((acc, [key, val]) => {
+ if (!acc) {
+ acc = {}
+ }
+ acc[key] = val
+ return acc
+ }, null))
+ })
}
-function pudGet (pud, key, validate) {
- let spec = pud.__specs[key]
- if (validate && !spec && (!pud.__opts.other || !pud.__opts.other(key))) {
- BadKeyError(key)
- } else {
- if (!spec) { spec = {} }
- let ret
- for (let p of pud.__providers) {
- ret = tryGet(key, p)
- if (ret === undefined && spec.aliases && spec.aliases.length) {
- for (let alias of spec.aliases) {
- if (alias === key) { continue }
- ret = tryGet(alias, p)
- if (ret !== undefined) {
- break
- }
- }
- }
- if (ret !== undefined) {
- break
- }
- }
- if (ret === undefined && spec.default !== undefined) {
- if (typeof spec.default === 'function') {
- return spec.default(pud)
+cmd.lsCollaborators.stream = (spec, user, opts) => {
+ if (typeof user === 'object' && !opts) {
+ opts = user
+ user = undefined
+ }
+ opts = AccessConfig(opts)
+ spec = npar(spec)
+ validate('OSO|OZO', [spec, user, opts])
+ const uri = `/-/package/${eu(spec.name)}/collaborators`
+ return npmFetch.json.stream(uri, '*', opts.concat({
+ query: {format: 'cli', user: user || undefined},
+ mapJson (value, [key]) {
+ if (value === 'read') {
+ return [key, 'read-only']
+ } else if (value === 'write') {
+ return [key, 'read-write']
} else {
- return spec.default
+ return [key, value]
}
- } else {
- return ret
}
- }
-}
-
-function tryGet (key, p) {
- let ret
- if (p.__isFiggyPudding) {
- ret = pudGet(p, key, false)
- } else if (typeof p.get === 'function') {
- ret = p.get(key)
- } else {
- ret = p[key]
- }
- return ret
+ }))
}
-const proxyHandler = {
- has (obj, prop) {
- return prop in obj.__specs && pudGet(obj, prop, false) !== undefined
- },
- ownKeys (obj) {
- return Object.keys(obj.__specs)
- },
- get (obj, prop) {
- if (
- typeof prop === 'symbol' ||
- prop.slice(0, 2) === '__' ||
- prop in FiggyPudding.prototype
- ) {
- return obj[prop]
- }
- return obj.get(prop)
- },
- set (obj, prop, value) {
- if (
- typeof prop === 'symbol' ||
- prop.slice(0, 2) === '__'
- ) {
- obj[prop] = value
- return true
- } else {
- throw new Error('figgyPudding options cannot be modified. Use .concat() instead.')
- }
- },
- deleteProperty () {
- throw new Error('figgyPudding options cannot be deleted. Use .concat() and shadow them instead.')
- }
+cmd.tfaRequired = (spec, opts) => setRequires2fa(spec, true, opts)
+cmd.tfaNotRequired = (spec, opts) => setRequires2fa(spec, false, opts)
+function setRequires2fa (spec, required, opts) {
+ opts = AccessConfig(opts)
+ return new opts.Promise((resolve, reject) => {
+ spec = npar(spec)
+ validate('OBO', [spec, required, opts])
+ const uri = `/-/package/${eu(spec.name)}/access`
+ return npmFetch(uri, opts.concat({
+ method: 'POST',
+ body: {publish_requires_tfa: required},
+ spec,
+ ignoreBody: true
+ })).then(resolve, reject)
+ }).then(() => true)
}
-module.exports = figgyPudding
-function figgyPudding (specs, opts) {
- function factory (...providers) {
- return new Proxy(new FiggyPudding(
- specs,
- opts,
- providers
- ), proxyHandler)
- }
- return factory
+cmd.edit = () => {
+ throw new Error('Not implemented yet')
}
-function reverse (arr) {
- const ret = []
- arr.forEach(x => ret.unshift(x))
- return ret
+function splitEntity (entity = '') {
+ let [, scope, team] = entity.match(/^@?([^:]+)(?::(.*))?$/) || []
+ return {scope, team}
}
-function entries (obj) {
- return Object.keys(obj).map(k => [k, obj[k]])
+function pwrap (opts, fn) {
+ return new opts.Promise((resolve, reject) => {
+ fn().then(resolve, reject)
+ })
}
/***/ }),
-/* 123 */,
-/* 124 */,
-/* 125 */,
-/* 126 */,
-/* 127 */
-/***/ (function(module, exports, __webpack_require__) {
+/* 117 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
-"use strict";
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+var pathModule = __webpack_require__(622);
+var isWindows = process.platform === 'win32';
+var fs = __webpack_require__(747);
-const path = __webpack_require__(622)
-const nopt = __webpack_require__(401)
-const log = __webpack_require__(412)
-const childProcess = __webpack_require__(129)
-const EE = __webpack_require__(614).EventEmitter
-const inherits = __webpack_require__(669).inherits
-const commands = [
- // Module build commands
- 'build',
- 'clean',
- 'configure',
- 'rebuild',
- // Development Header File management commands
- 'install',
- 'list',
- 'remove'
-]
-const aliases = {
- ls: 'list',
- rm: 'remove'
-}
+// JavaScript implementation of realpath, ported from node pre-v6
-// differentiate node-gyp's logs from npm's
-log.heading = 'gyp'
+var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG);
-function gyp () {
- return new Gyp()
-}
+function rethrow() {
+ // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
+ // is fairly slow to generate.
+ var callback;
+ if (DEBUG) {
+ var backtrace = new Error;
+ callback = debugCallback;
+ } else
+ callback = missingCallback;
-function Gyp () {
- var self = this
+ return callback;
- this.devDir = ''
- this.commands = {}
+ function debugCallback(err) {
+ if (err) {
+ backtrace.message = err.message;
+ err = backtrace;
+ missingCallback(err);
+ }
+ }
- commands.forEach(function (command) {
- self.commands[command] = function (argv, callback) {
- log.verbose('command', command, argv)
- return require('./' + command)(self, argv, callback)
+ function missingCallback(err) {
+ if (err) {
+ if (process.throwDeprecation)
+ throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs
+ else if (!process.noDeprecation) {
+ var msg = 'fs: missing callback ' + (err.stack || err.message);
+ if (process.traceDeprecation)
+ console.trace(msg);
+ else
+ console.error(msg);
+ }
}
- })
+ }
}
-inherits(Gyp, EE)
-exports.Gyp = Gyp
-var proto = Gyp.prototype
-
-/**
- * Export the contents of the package.json.
- */
-proto.package = __webpack_require__(387)
+function maybeCallback(cb) {
+ return typeof cb === 'function' ? cb : rethrow();
+}
-/**
- * nopt configuration definitions
- */
+var normalize = pathModule.normalize;
-proto.configDefs = {
- help: Boolean, // everywhere
- arch: String, // 'configure'
- cafile: String, // 'install'
- debug: Boolean, // 'build'
- directory: String, // bin
- make: String, // 'build'
- msvs_version: String, // 'configure'
- ensure: Boolean, // 'install'
- solution: String, // 'build' (windows only)
- proxy: String, // 'install'
- noproxy: String, // 'install'
- devdir: String, // everywhere
- nodedir: String, // 'configure'
- loglevel: String, // everywhere
- python: String, // 'configure'
- 'dist-url': String, // 'install'
- tarball: String, // 'install'
- jobs: String, // 'build'
- thin: String // 'configure'
+// Regexp that finds the next partion of a (partial) path
+// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
+if (isWindows) {
+ var nextPartRe = /(.*?)(?:[\/\\]+|$)/g;
+} else {
+ var nextPartRe = /(.*?)(?:[\/]+|$)/g;
}
-/**
- * nopt shorthands
- */
-
-proto.shorthands = {
- release: '--no-debug',
- C: '--directory',
- debug: '--debug',
- j: '--jobs',
- silly: '--loglevel=silly',
- verbose: '--loglevel=verbose',
- silent: '--loglevel=silent'
+// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
+if (isWindows) {
+ var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/;
+} else {
+ var splitRootRe = /^[\/]*/;
}
-/**
- * expose the command aliases for the bin file to use.
- */
+exports.realpathSync = function realpathSync(p, cache) {
+ // make p is absolute
+ p = pathModule.resolve(p);
-proto.aliases = aliases
+ if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
+ return cache[p];
+ }
-/**
- * Parses the given argv array and sets the 'opts',
- * 'argv' and 'command' properties.
- */
+ var original = p,
+ seenLinks = {},
+ knownHard = {};
-proto.parseArgv = function parseOpts (argv) {
- this.opts = nopt(this.configDefs, this.shorthands, argv)
- this.argv = this.opts.argv.remain.slice()
+ // current character position in p
+ var pos;
+ // the partial path so far, including a trailing slash if any
+ var current;
+ // the partial path without a trailing slash (except when pointing at a root)
+ var base;
+ // the partial path scanned in the previous round, with slash
+ var previous;
- var commands = this.todo = []
+ start();
- // create a copy of the argv array with aliases mapped
- argv = this.argv.map(function (arg) {
- // is this an alias?
- if (arg in this.aliases) {
- arg = this.aliases[arg]
- }
- return arg
- }, this)
+ function start() {
+ // Skip over roots
+ var m = splitRootRe.exec(p);
+ pos = m[0].length;
+ current = m[0];
+ base = m[0];
+ previous = '';
- // process the mapped args into "command" objects ("name" and "args" props)
- argv.slice().forEach(function (arg) {
- if (arg in this.commands) {
- var args = argv.splice(0, argv.indexOf(arg))
- argv.shift()
- if (commands.length > 0) {
- commands[commands.length - 1].args = args
- }
- commands.push({ name: arg, args: [] })
+ // On windows, check that the root exists. On unix there is no need.
+ if (isWindows && !knownHard[base]) {
+ fs.lstatSync(base);
+ knownHard[base] = true;
}
- }, this)
- if (commands.length > 0) {
- commands[commands.length - 1].args = argv.splice(0)
}
- // support for inheriting config env variables from npm
- var npmConfigPrefix = 'npm_config_'
- Object.keys(process.env).forEach(function (name) {
- if (name.indexOf(npmConfigPrefix) !== 0) {
- return
+ // walk down the path, swapping out linked pathparts for their real
+ // values
+ // NB: p.length changes.
+ while (pos < p.length) {
+ // find the next part
+ nextPartRe.lastIndex = pos;
+ var result = nextPartRe.exec(p);
+ previous = current;
+ current += result[0];
+ base = previous + result[1];
+ pos = nextPartRe.lastIndex;
+
+ // continue if not a symlink
+ if (knownHard[base] || (cache && cache[base] === base)) {
+ continue;
}
- var val = process.env[name]
- if (name === npmConfigPrefix + 'loglevel') {
- log.level = val
+
+ var resolvedLink;
+ if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
+ // some known symbolic link. no need to stat again.
+ resolvedLink = cache[base];
} else {
- // add the user-defined options to the config
- name = name.substring(npmConfigPrefix.length)
- // gyp@741b7f1 enters an infinite loop when it encounters
- // zero-length options so ensure those don't get through.
- if (name) {
- this.opts[name] = val
+ var stat = fs.lstatSync(base);
+ if (!stat.isSymbolicLink()) {
+ knownHard[base] = true;
+ if (cache) cache[base] = base;
+ continue;
+ }
+
+ // read the link if it wasn't read before
+ // dev/ino always return 0 on windows, so skip the check.
+ var linkTarget = null;
+ if (!isWindows) {
+ var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
+ if (seenLinks.hasOwnProperty(id)) {
+ linkTarget = seenLinks[id];
+ }
+ }
+ if (linkTarget === null) {
+ fs.statSync(base);
+ linkTarget = fs.readlinkSync(base);
}
+ resolvedLink = pathModule.resolve(previous, linkTarget);
+ // track this, if given a cache.
+ if (cache) cache[base] = resolvedLink;
+ if (!isWindows) seenLinks[id] = linkTarget;
}
- }, this)
- if (this.opts.loglevel) {
- log.level = this.opts.loglevel
+ // resolve the link, then start over
+ p = pathModule.resolve(resolvedLink, p.slice(pos));
+ start();
}
- log.resume()
-}
-/**
- * Spawns a child process and emits a 'spawn' event.
- */
+ if (cache) cache[original] = p;
-proto.spawn = function spawn (command, args, opts) {
- if (!opts) {
- opts = {}
- }
- if (!opts.silent && !opts.stdio) {
- opts.stdio = [0, 1, 2]
+ return p;
+};
+
+
+exports.realpath = function realpath(p, cache, cb) {
+ if (typeof cb !== 'function') {
+ cb = maybeCallback(cache);
+ cache = null;
}
- var cp = childProcess.spawn(command, args, opts)
- log.info('spawn', command)
- log.info('spawn args', args)
- return cp
-}
-/**
- * Returns the usage instructions for node-gyp.
- */
+ // make p is absolute
+ p = pathModule.resolve(p);
-proto.usage = function usage () {
- var str = [
- '',
- ' Usage: node-gyp [options]',
- '',
- ' where is one of:',
- commands.map(function (c) {
- return ' - ' + c + ' - ' + require('./' + c).usage
- }).join('\n'),
- '',
- 'node-gyp@' + this.version + ' ' + path.resolve(__dirname, '..'),
- 'node@' + process.versions.node
- ].join('\n')
- return str
-}
+ if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
+ return process.nextTick(cb.bind(null, null, cache[p]));
+ }
-/**
- * Version number getter.
- */
+ var original = p,
+ seenLinks = {},
+ knownHard = {};
-Object.defineProperty(proto, 'version', {
- get: function () {
- return this.package.version
- },
- enumerable: true
-})
-
-module.exports = exports = gyp
-
-
-/***/ }),
-/* 128 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-
-module.exports = spawn
-
-const _spawn = __webpack_require__(129).spawn
-const EventEmitter = __webpack_require__(614).EventEmitter
-
-let progressEnabled
-let running = 0
-
-function startRunning (log) {
- if (progressEnabled == null) progressEnabled = log.progressEnabled
- if (progressEnabled) log.disableProgress()
- ++running
-}
-
-function stopRunning (log) {
- --running
- if (progressEnabled && running === 0) log.enableProgress()
-}
-
-function willCmdOutput (stdio) {
- if (stdio === 'inherit') return true
- if (!Array.isArray(stdio)) return false
- for (let fh = 1; fh <= 2; ++fh) {
- if (stdio[fh] === 'inherit') return true
- if (stdio[fh] === 1 || stdio[fh] === 2) return true
- }
- return false
-}
+ // current character position in p
+ var pos;
+ // the partial path so far, including a trailing slash if any
+ var current;
+ // the partial path without a trailing slash (except when pointing at a root)
+ var base;
+ // the partial path scanned in the previous round, with slash
+ var previous;
-function spawn (cmd, args, options, log) {
- const cmdWillOutput = willCmdOutput(options && options.stdio)
+ start();
- if (cmdWillOutput) startRunning(log)
- const raw = _spawn(cmd, args, options)
- const cooked = new EventEmitter()
+ function start() {
+ // Skip over roots
+ var m = splitRootRe.exec(p);
+ pos = m[0].length;
+ current = m[0];
+ base = m[0];
+ previous = '';
- raw.on('error', function (er) {
- if (cmdWillOutput) stopRunning(log)
- er.file = cmd
- cooked.emit('error', er)
- }).on('close', function (code, signal) {
- if (cmdWillOutput) stopRunning(log)
- // Create ENOENT error because Node.js v8.0 will not emit
- // an `error` event if the command could not be found.
- if (code === 127) {
- const er = new Error('spawn ENOENT')
- er.code = 'ENOENT'
- er.errno = 'ENOENT'
- er.syscall = 'spawn'
- er.file = cmd
- cooked.emit('error', er)
+ // On windows, check that the root exists. On unix there is no need.
+ if (isWindows && !knownHard[base]) {
+ fs.lstat(base, function(err) {
+ if (err) return cb(err);
+ knownHard[base] = true;
+ LOOP();
+ });
} else {
- cooked.emit('close', code, signal)
+ process.nextTick(LOOP);
}
- })
-
- cooked.stdin = raw.stdin
- cooked.stdout = raw.stdout
- cooked.stderr = raw.stderr
- cooked.kill = function (sig) { return raw.kill(sig) }
-
- return cooked
-}
-
-
-/***/ }),
-/* 129 */
-/***/ (function(module) {
-
-module.exports = require("child_process");
-
-/***/ }),
-/* 130 */,
-/* 131 */,
-/* 132 */
-/***/ (function(module) {
-
-module.exports = {"repositories":"'repositories' (plural) Not supported. Please pick one as the 'repository' field","missingRepository":"No repository field.","brokenGitUrl":"Probably broken git url: %s","nonObjectScripts":"scripts must be an object","nonStringScript":"script values must be string commands","nonArrayFiles":"Invalid 'files' member","invalidFilename":"Invalid filename in 'files' list: %s","nonArrayBundleDependencies":"Invalid 'bundleDependencies' list. Must be array of package names","nonStringBundleDependency":"Invalid bundleDependencies member: %s","nonDependencyBundleDependency":"Non-dependency in bundleDependencies: %s","nonObjectDependencies":"%s field must be an object","nonStringDependency":"Invalid dependency: %s %s","deprecatedArrayDependencies":"specifying %s as array is deprecated","deprecatedModules":"modules field is deprecated","nonArrayKeywords":"keywords should be an array of strings","nonStringKeyword":"keywords should be an array of strings","conflictingName":"%s is also the name of a node core module.","nonStringDescription":"'description' field should be a string","missingDescription":"No description","missingReadme":"No README data","missingLicense":"No license field.","nonEmailUrlBugsString":"Bug string field must be url, email, or {email,url}","nonUrlBugsUrlField":"bugs.url field must be a string url. Deleted.","nonEmailBugsEmailField":"bugs.email field must be a string email. Deleted.","emptyNormalizedBugs":"Normalized value of bugs field is an empty object. Deleted.","nonUrlHomepage":"homepage field must be a string url. Deleted.","invalidLicense":"license should be a valid SPDX license expression","typo":"%s should probably be %s."};
-
-/***/ }),
-/* 133 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
+ }
-const BB = __webpack_require__(489)
+ // walk down the path, swapping out linked pathparts for their real
+ // values
+ function LOOP() {
+ // stop if scanned past end of path
+ if (pos >= p.length) {
+ if (cache) cache[original] = p;
+ return cb(null, p);
+ }
-const chownr = BB.promisify(__webpack_require__(941))
-const mkdirp = BB.promisify(__webpack_require__(626))
-const inflight = __webpack_require__(593)
-const inferOwner = __webpack_require__(686)
+ // find the next part
+ nextPartRe.lastIndex = pos;
+ var result = nextPartRe.exec(p);
+ previous = current;
+ current += result[0];
+ base = previous + result[1];
+ pos = nextPartRe.lastIndex;
-// Memoize getuid()/getgid() calls.
-// patch process.setuid/setgid to invalidate cached value on change
-const self = { uid: null, gid: null }
-const getSelf = () => {
- if (typeof self.uid !== 'number') {
- self.uid = process.getuid()
- const setuid = process.setuid
- process.setuid = (uid) => {
- self.uid = null
- process.setuid = setuid
- return process.setuid(uid)
- }
- }
- if (typeof self.gid !== 'number') {
- self.gid = process.getgid()
- const setgid = process.setgid
- process.setgid = (gid) => {
- self.gid = null
- process.setgid = setgid
- return process.setgid(gid)
+ // continue if not a symlink
+ if (knownHard[base] || (cache && cache[base] === base)) {
+ return process.nextTick(LOOP);
}
- }
-}
-module.exports.chownr = fixOwner
-function fixOwner (cache, filepath) {
- if (!process.getuid) {
- // This platform doesn't need ownership fixing
- return BB.resolve()
- }
+ if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
+ // known symbolic link. no need to stat again.
+ return gotResolvedLink(cache[base]);
+ }
- getSelf()
- if (self.uid !== 0) {
- // almost certainly can't chown anyway
- return BB.resolve()
+ return fs.lstat(base, gotStat);
}
- return BB.resolve(inferOwner(cache)).then(owner => {
- const { uid, gid } = owner
-
- // No need to override if it's already what we used.
- if (self.uid === uid && self.gid === gid) {
- return
- }
-
- return inflight(
- 'fixOwner: fixing ownership on ' + filepath,
- () => chownr(
- filepath,
- typeof uid === 'number' ? uid : self.uid,
- typeof gid === 'number' ? gid : self.gid
- ).catch({ code: 'ENOENT' }, () => null)
- )
- })
-}
+ function gotStat(err, stat) {
+ if (err) return cb(err);
-module.exports.chownr.sync = fixOwnerSync
-function fixOwnerSync (cache, filepath) {
- if (!process.getuid) {
- // This platform doesn't need ownership fixing
- return
- }
- const { uid, gid } = inferOwner.sync(cache)
- getSelf()
- if (self.uid === uid && self.gid === gid) {
- // No need to override if it's already what we used.
- return
- }
- try {
- chownr.sync(
- filepath,
- typeof uid === 'number' ? uid : self.uid,
- typeof gid === 'number' ? gid : self.gid
- )
- } catch (err) {
- // only catch ENOENT, any other error is a problem.
- if (err.code === 'ENOENT') {
- return null
+ // if not a symlink, skip to the next path part
+ if (!stat.isSymbolicLink()) {
+ knownHard[base] = true;
+ if (cache) cache[base] = base;
+ return process.nextTick(LOOP);
}
- throw err
- }
-}
-module.exports.mkdirfix = mkdirfix
-function mkdirfix (cache, p, cb) {
- // we have to infer the owner _before_ making the directory, even though
- // we aren't going to use the results, since the cache itself might not
- // exist yet. If we mkdirp it, then our current uid/gid will be assumed
- // to be correct if it creates the cache folder in the process.
- return BB.resolve(inferOwner(cache)).then(() => {
- return mkdirp(p).then(made => {
- if (made) {
- return fixOwner(cache, made).then(() => made)
+ // stat & read the link if not read before
+ // call gotTarget as soon as the link target is known
+ // dev/ino always return 0 on windows, so skip the check.
+ if (!isWindows) {
+ var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
+ if (seenLinks.hasOwnProperty(id)) {
+ return gotTarget(null, seenLinks[id], base);
}
- }).catch({ code: 'EEXIST' }, () => {
- // There's a race in mkdirp!
- return fixOwner(cache, p).then(() => null)
- })
- })
-}
-
-module.exports.mkdirfix.sync = mkdirfixSync
-function mkdirfixSync (cache, p) {
- try {
- inferOwner.sync(cache)
- const made = mkdirp.sync(p)
- if (made) {
- fixOwnerSync(cache, made)
- return made
- }
- } catch (err) {
- if (err.code === 'EEXIST') {
- fixOwnerSync(cache, p)
- return null
- } else {
- throw err
}
- }
-}
-
-
-/***/ }),
-/* 134 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-
-const BB = __webpack_require__(489)
-
-const fetch = __webpack_require__(789)
-const manifest = __webpack_require__(935)
-const optCheck = __webpack_require__(420)
-const PassThrough = __webpack_require__(794).PassThrough
-const ssri = __webpack_require__(951)
-const url = __webpack_require__(835)
+ fs.stat(base, function(err) {
+ if (err) return cb(err);
-module.exports = tarball
-function tarball (spec, opts) {
- opts = optCheck(opts)
- const registry = fetch.pickRegistry(spec, opts)
- const stream = new PassThrough()
- let mani
- if (
- opts.resolved &&
- // spec.type === 'version' &&
- opts.resolved.indexOf(registry) === 0
- ) {
- // fakeChild is a shortcut to avoid looking up a manifest!
- mani = BB.resolve({
- name: spec.name,
- version: spec.fetchSpec,
- _integrity: opts.integrity,
- _resolved: opts.resolved,
- _fakeChild: true
- })
- } else {
- // We can't trust opts.resolved if it's going to a separate host.
- mani = manifest(spec, opts)
+ fs.readlink(base, function(err, target) {
+ if (!isWindows) seenLinks[id] = target;
+ gotTarget(err, target);
+ });
+ });
}
- mani.then(mani => {
- !mani._fakeChild && stream.emit('manifest', mani)
- const fetchStream = fromManifest(mani, spec, opts).on(
- 'integrity', i => stream.emit('integrity', i)
- )
- fetchStream.on('error', err => stream.emit('error', err))
- fetchStream.pipe(stream)
- return null
- }).catch(err => stream.emit('error', err))
- return stream
-}
+ function gotTarget(err, target, base) {
+ if (err) return cb(err);
-module.exports.fromManifest = fromManifest
-function fromManifest (manifest, spec, opts) {
- opts = optCheck(opts)
- if (spec.scope) { opts = opts.concat({ scope: spec.scope }) }
- const stream = new PassThrough()
- const registry = fetch.pickRegistry(spec, opts)
- const uri = getTarballUrl(spec, registry, manifest, opts)
- fetch(uri, opts.concat({
- headers: {
- 'pacote-req-type': 'tarball',
- 'pacote-pkg-id': `registry:${manifest.name}@${uri}`
- },
- integrity: manifest._integrity,
- algorithms: [
- manifest._integrity
- ? ssri.parse(manifest._integrity).pickAlgorithm()
- : 'sha1'
- ],
- spec
- }, opts))
- .then(res => {
- const hash = res.headers.get('x-local-cache-hash')
- if (hash) {
- stream.emit('integrity', decodeURIComponent(hash))
- }
- res.body.on('error', err => stream.emit('error', err))
- res.body.pipe(stream)
- return null
- })
- .catch(err => stream.emit('error', err))
- return stream
-}
+ var resolvedLink = pathModule.resolve(previous, target);
+ if (cache) cache[base] = resolvedLink;
+ gotResolvedLink(resolvedLink);
+ }
-function getTarballUrl (spec, registry, mani, opts) {
- const reg = url.parse(registry)
- const tarball = url.parse(mani._resolved)
- // https://github.com/npm/npm/pull/9471
- //
- // TL;DR: Some alternative registries host tarballs on http and packuments
- // on https, and vice-versa. There's also a case where people who can't use
- // SSL to access the npm registry, for example, might use
- // `--registry=http://registry.npmjs.org/`. In this case, we need to
- // rewrite `tarball` to match the protocol.
- //
- if (reg.hostname === tarball.hostname && reg.protocol !== tarball.protocol) {
- tarball.protocol = reg.protocol
- // Ports might be same host different protocol!
- if (reg.port !== tarball.port) {
- delete tarball.host
- tarball.port = reg.port
- }
- delete tarball.href
+ function gotResolvedLink(resolvedLink) {
+ // resolve the link, then start over
+ p = pathModule.resolve(resolvedLink, p.slice(pos));
+ start();
}
- return url.format(tarball)
-}
+};
/***/ }),
-/* 135 */,
-/* 136 */
+/* 118 */
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
Object.defineProperty(exports, "__esModule", { value: true });
-exports.MetricsAPI = void 0;
-var NoopMeterProvider_1 = __webpack_require__(450);
-var global_utils_1 = __webpack_require__(976);
-/**
- * Singleton object which represents the entry point to the OpenTelemetry Metrics API
- */
-var MetricsAPI = /** @class */ (function () {
- /** Empty private constructor prevents end users from constructing a new instance of the API */
- function MetricsAPI() {
- }
- /** Get the singleton instance of the Metrics API */
- MetricsAPI.getInstance = function () {
- if (!this._instance) {
- this._instance = new MetricsAPI();
- }
- return this._instance;
- };
+const utils_1 = __webpack_require__(280);
+// The default Buffer size if one is not provided.
+const DEFAULT_SMARTBUFFER_SIZE = 4096;
+// The default string encoding to use for reading/writing strings.
+const DEFAULT_SMARTBUFFER_ENCODING = 'utf8';
+class SmartBuffer {
/**
- * Set the current global meter. Returns the initialized global meter provider.
+ * Creates a new SmartBuffer instance.
+ *
+ * @param options { SmartBufferOptions } The SmartBufferOptions to apply to this instance.
*/
- MetricsAPI.prototype.setGlobalMeterProvider = function (provider) {
- if (global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY]) {
- // global meter provider has already been set
- return this.getMeterProvider();
+ constructor(options) {
+ this.length = 0;
+ this._encoding = DEFAULT_SMARTBUFFER_ENCODING;
+ this._writeOffset = 0;
+ this._readOffset = 0;
+ if (SmartBuffer.isSmartBufferOptions(options)) {
+ // Checks for encoding
+ if (options.encoding) {
+ utils_1.checkEncoding(options.encoding);
+ this._encoding = options.encoding;
+ }
+ // Checks for initial size length
+ if (options.size) {
+ if (utils_1.isFiniteInteger(options.size) && options.size > 0) {
+ this._buff = Buffer.allocUnsafe(options.size);
+ }
+ else {
+ throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_SIZE);
+ }
+ // Check for initial Buffer
+ }
+ else if (options.buff) {
+ if (options.buff instanceof Buffer) {
+ this._buff = options.buff;
+ this.length = options.buff.length;
+ }
+ else {
+ throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_BUFFER);
+ }
+ }
+ else {
+ this._buff = Buffer.allocUnsafe(DEFAULT_SMARTBUFFER_SIZE);
+ }
}
- global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY] = global_utils_1.makeGetter(global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION, provider, NoopMeterProvider_1.NOOP_METER_PROVIDER);
- return provider;
- };
+ else {
+ // If something was passed but it's not a SmartBufferOptions object
+ if (typeof options !== 'undefined') {
+ throw new Error(utils_1.ERRORS.INVALID_SMARTBUFFER_OBJECT);
+ }
+ // Otherwise default to sane options
+ this._buff = Buffer.allocUnsafe(DEFAULT_SMARTBUFFER_SIZE);
+ }
+ }
/**
- * Returns the global meter provider.
+ * Creates a new SmartBuffer instance with the provided internal Buffer size and optional encoding.
+ *
+ * @param size { Number } The size of the internal Buffer.
+ * @param encoding { String } The BufferEncoding to use for strings.
+ *
+ * @return { SmartBuffer }
*/
- MetricsAPI.prototype.getMeterProvider = function () {
- var _a, _b;
- return ((_b = (_a = global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY]) === null || _a === void 0 ? void 0 : _a.call(global_utils_1._global, global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION)) !== null && _b !== void 0 ? _b : NoopMeterProvider_1.NOOP_METER_PROVIDER);
- };
+ static fromSize(size, encoding) {
+ return new this({
+ size: size,
+ encoding: encoding
+ });
+ }
/**
- * Returns a meter from the global meter provider.
+ * Creates a new SmartBuffer instance with the provided Buffer and optional encoding.
+ *
+ * @param buffer { Buffer } The Buffer to use as the internal Buffer value.
+ * @param encoding { String } The BufferEncoding to use for strings.
+ *
+ * @return { SmartBuffer }
*/
- MetricsAPI.prototype.getMeter = function (name, version) {
- return this.getMeterProvider().getMeter(name, version);
- };
- /** Remove the global meter provider */
- MetricsAPI.prototype.disable = function () {
- delete global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY];
- };
- return MetricsAPI;
-}());
-exports.MetricsAPI = MetricsAPI;
-//# sourceMappingURL=metrics.js.map
-
-/***/ }),
-/* 137 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-var eos = __webpack_require__(3)
-var shift = __webpack_require__(475)
-
-module.exports = each
-
-function each (stream, fn, cb) {
- var want = true
- var error = null
- var ended = false
- var running = false
- var calling = false
-
- stream.on('readable', onreadable)
- onreadable()
-
- if (cb) eos(stream, {readable: true, writable: false}, done)
- return stream
-
- function done (err) {
- if (!error) error = err
- ended = true
- if (!running) cb(error)
- }
-
- function onreadable () {
- if (want) read()
- }
-
- function afterRead (err) {
- running = false
-
- if (err) {
- error = err
- if (ended) return cb(error)
- stream.destroy(err)
- return
+ static fromBuffer(buff, encoding) {
+ return new this({
+ buff: buff,
+ encoding: encoding
+ });
}
- if (ended) return cb(error)
- if (!calling) read()
- }
-
- function read () {
- while (!running && !ended) {
- want = false
-
- var data = shift(stream)
- if (ended) return
- if (data === null) {
- want = true
- return
- }
-
- running = true
- calling = true
- fn(data, afterRead)
- calling = false
+ /**
+ * Creates a new SmartBuffer instance with the provided SmartBufferOptions options.
+ *
+ * @param options { SmartBufferOptions } The options to use when creating the SmartBuffer instance.
+ */
+ static fromOptions(options) {
+ return new this(options);
}
- }
-}
-
-
-/***/ }),
-/* 138 */,
-/* 139 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-// Unique ID creation requires a high quality random # generator. In node.js
-// this is pretty straight-forward - we use the crypto API.
-
-var crypto = __webpack_require__(417);
-
-module.exports = function nodeRNG() {
- return crypto.randomBytes(16);
-};
-
-
-/***/ }),
-/* 140 */
-/***/ (function(module) {
-
-"use strict";
-
-
-function isArguments (thingy) {
- return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee')
-}
-
-var types = {
- '*': {label: 'any', check: function () { return true }},
- A: {label: 'array', check: function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }},
- S: {label: 'string', check: function (thingy) { return typeof thingy === 'string' }},
- N: {label: 'number', check: function (thingy) { return typeof thingy === 'number' }},
- F: {label: 'function', check: function (thingy) { return typeof thingy === 'function' }},
- O: {label: 'object', check: function (thingy) { return typeof thingy === 'object' && thingy != null && !types.A.check(thingy) && !types.E.check(thingy) }},
- B: {label: 'boolean', check: function (thingy) { return typeof thingy === 'boolean' }},
- E: {label: 'error', check: function (thingy) { return thingy instanceof Error }},
- Z: {label: 'null', check: function (thingy) { return thingy == null }}
-}
-
-function addSchema (schema, arity) {
- var group = arity[schema.length] = arity[schema.length] || []
- if (group.indexOf(schema) === -1) group.push(schema)
-}
-
-var validate = module.exports = function (rawSchemas, args) {
- if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length)
- if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas')
- if (!args) throw missingRequiredArg(1, 'args')
- if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas)
- if (!types.A.check(args)) throw invalidType(1, ['array'], args)
- var schemas = rawSchemas.split('|')
- var arity = {}
-
- schemas.forEach(function (schema) {
- for (var ii = 0; ii < schema.length; ++ii) {
- var type = schema[ii]
- if (!types[type]) throw unknownType(ii, type)
+ /**
+ * Type checking function that determines if an object is a SmartBufferOptions object.
+ */
+ static isSmartBufferOptions(options) {
+ const castOptions = options;
+ return (castOptions &&
+ (castOptions.encoding !== undefined || castOptions.size !== undefined || castOptions.buff !== undefined));
}
- if (/E.*E/.test(schema)) throw moreThanOneError(schema)
- addSchema(schema, arity)
- if (/E/.test(schema)) {
- addSchema(schema.replace(/E.*$/, 'E'), arity)
- addSchema(schema.replace(/E/, 'Z'), arity)
- if (schema.length === 1) addSchema('', arity)
+ // Signed integers
+ /**
+ * Reads an Int8 value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { Number }
+ */
+ readInt8(offset) {
+ return this._readNumberValue(Buffer.prototype.readInt8, 1, offset);
}
- })
- var matching = arity[args.length]
- if (!matching) {
- throw wrongNumberOfArgs(Object.keys(arity), args.length)
- }
- for (var ii = 0; ii < args.length; ++ii) {
- var newMatching = matching.filter(function (schema) {
- var type = schema[ii]
- var typeCheck = types[type].check
- return typeCheck(args[ii])
- })
- if (!newMatching.length) {
- var labels = matching.map(function (schema) {
- return types[schema[ii]].label
- }).filter(function (schema) { return schema != null })
- throw invalidType(ii, labels, args[ii])
+ /**
+ * Reads an Int16BE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { Number }
+ */
+ readInt16BE(offset) {
+ return this._readNumberValue(Buffer.prototype.readInt16BE, 2, offset);
}
- matching = newMatching
- }
-}
-
-function missingRequiredArg (num) {
- return newException('EMISSINGARG', 'Missing required argument #' + (num + 1))
-}
-
-function unknownType (num, type) {
- return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1))
-}
-
-function invalidType (num, expectedTypes, value) {
- var valueType
- Object.keys(types).forEach(function (typeCode) {
- if (types[typeCode].check(value)) valueType = types[typeCode].label
- })
- return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' +
- englishList(expectedTypes) + ' but got ' + valueType)
-}
-
-function englishList (list) {
- return list.join(', ').replace(/, ([^,]+)$/, ' or $1')
-}
-
-function wrongNumberOfArgs (expected, got) {
- var english = englishList(expected)
- var args = expected.every(function (ex) { return ex.length === 1 })
- ? 'argument'
- : 'arguments'
- return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got)
-}
-
-function moreThanOneError (schema) {
- return newException('ETOOMANYERRORTYPES',
- 'Only one error type per argument signature is allowed, more than one found in "' + schema + '"')
-}
-
-function newException (code, msg) {
- var e = new Error(msg)
- e.code = code
- if (Error.captureStackTrace) Error.captureStackTrace(e, validate)
- return e
-}
-
-
-/***/ }),
-/* 141 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-
-var net = __webpack_require__(631);
-var tls = __webpack_require__(16);
-var http = __webpack_require__(605);
-var https = __webpack_require__(211);
-var events = __webpack_require__(614);
-var assert = __webpack_require__(357);
-var util = __webpack_require__(669);
-
-
-exports.httpOverHttp = httpOverHttp;
-exports.httpsOverHttp = httpsOverHttp;
-exports.httpOverHttps = httpOverHttps;
-exports.httpsOverHttps = httpsOverHttps;
-
-
-function httpOverHttp(options) {
- var agent = new TunnelingAgent(options);
- agent.request = http.request;
- return agent;
-}
-
-function httpsOverHttp(options) {
- var agent = new TunnelingAgent(options);
- agent.request = http.request;
- agent.createSocket = createSecureSocket;
- agent.defaultPort = 443;
- return agent;
-}
-
-function httpOverHttps(options) {
- var agent = new TunnelingAgent(options);
- agent.request = https.request;
- return agent;
-}
-
-function httpsOverHttps(options) {
- var agent = new TunnelingAgent(options);
- agent.request = https.request;
- agent.createSocket = createSecureSocket;
- agent.defaultPort = 443;
- return agent;
-}
-
-
-function TunnelingAgent(options) {
- var self = this;
- self.options = options || {};
- self.proxyOptions = self.options.proxy || {};
- self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
- self.requests = [];
- self.sockets = [];
-
- self.on('free', function onFree(socket, host, port, localAddress) {
- var options = toOptions(host, port, localAddress);
- for (var i = 0, len = self.requests.length; i < len; ++i) {
- var pending = self.requests[i];
- if (pending.host === options.host && pending.port === options.port) {
- // Detect the request to connect same origin server,
- // reuse the connection.
- self.requests.splice(i, 1);
- pending.request.onSocket(socket);
- return;
- }
+ /**
+ * Reads an Int16LE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { Number }
+ */
+ readInt16LE(offset) {
+ return this._readNumberValue(Buffer.prototype.readInt16LE, 2, offset);
}
- socket.destroy();
- self.removeSocket(socket);
- });
-}
-util.inherits(TunnelingAgent, events.EventEmitter);
-
-TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
- var self = this;
- var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));
-
- if (self.sockets.length >= this.maxSockets) {
- // We are over limit so we'll add it to the queue.
- self.requests.push(options);
- return;
- }
-
- // If we are under maxSockets create a new one.
- self.createSocket(options, function(socket) {
- socket.on('free', onFree);
- socket.on('close', onCloseOrRemove);
- socket.on('agentRemove', onCloseOrRemove);
- req.onSocket(socket);
-
- function onFree() {
- self.emit('free', socket, options);
+ /**
+ * Reads an Int32BE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { Number }
+ */
+ readInt32BE(offset) {
+ return this._readNumberValue(Buffer.prototype.readInt32BE, 4, offset);
}
-
- function onCloseOrRemove(err) {
- self.removeSocket(socket);
- socket.removeListener('free', onFree);
- socket.removeListener('close', onCloseOrRemove);
- socket.removeListener('agentRemove', onCloseOrRemove);
+ /**
+ * Reads an Int32LE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { Number }
+ */
+ readInt32LE(offset) {
+ return this._readNumberValue(Buffer.prototype.readInt32LE, 4, offset);
}
- });
-};
-
-TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
- var self = this;
- var placeholder = {};
- self.sockets.push(placeholder);
-
- var connectOptions = mergeOptions({}, self.proxyOptions, {
- method: 'CONNECT',
- path: options.host + ':' + options.port,
- agent: false,
- headers: {
- host: options.host + ':' + options.port
+ /**
+ * Reads a BigInt64BE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { BigInt }
+ */
+ readBigInt64BE(offset) {
+ utils_1.bigIntAndBufferInt64Check('readBigInt64BE');
+ return this._readNumberValue(Buffer.prototype.readBigInt64BE, 8, offset);
}
- });
- if (options.localAddress) {
- connectOptions.localAddress = options.localAddress;
- }
- if (connectOptions.proxyAuth) {
- connectOptions.headers = connectOptions.headers || {};
- connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
- new Buffer(connectOptions.proxyAuth).toString('base64');
- }
-
- debug('making CONNECT request');
- var connectReq = self.request(connectOptions);
- connectReq.useChunkedEncodingByDefault = false; // for v0.6
- connectReq.once('response', onResponse); // for v0.6
- connectReq.once('upgrade', onUpgrade); // for v0.6
- connectReq.once('connect', onConnect); // for v0.7 or later
- connectReq.once('error', onError);
- connectReq.end();
-
- function onResponse(res) {
- // Very hacky. This is necessary to avoid http-parser leaks.
- res.upgrade = true;
- }
-
- function onUpgrade(res, socket, head) {
- // Hacky.
- process.nextTick(function() {
- onConnect(res, socket, head);
- });
- }
-
- function onConnect(res, socket, head) {
- connectReq.removeAllListeners();
- socket.removeAllListeners();
-
- if (res.statusCode !== 200) {
- debug('tunneling socket could not be established, statusCode=%d',
- res.statusCode);
- socket.destroy();
- var error = new Error('tunneling socket could not be established, ' +
- 'statusCode=' + res.statusCode);
- error.code = 'ECONNRESET';
- options.request.emit('error', error);
- self.removeSocket(placeholder);
- return;
+ /**
+ * Reads a BigInt64LE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { BigInt }
+ */
+ readBigInt64LE(offset) {
+ utils_1.bigIntAndBufferInt64Check('readBigInt64LE');
+ return this._readNumberValue(Buffer.prototype.readBigInt64LE, 8, offset);
}
- if (head.length > 0) {
- debug('got illegal response body from proxy');
- socket.destroy();
- var error = new Error('got illegal response body from proxy');
- error.code = 'ECONNRESET';
- options.request.emit('error', error);
- self.removeSocket(placeholder);
- return;
+ /**
+ * Writes an Int8 value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeInt8(value, offset) {
+ this._writeNumberValue(Buffer.prototype.writeInt8, 1, value, offset);
+ return this;
}
- debug('tunneling connection has established');
- self.sockets[self.sockets.indexOf(placeholder)] = socket;
- return cb(socket);
- }
-
- function onError(cause) {
- connectReq.removeAllListeners();
-
- debug('tunneling socket could not be established, cause=%s\n',
- cause.message, cause.stack);
- var error = new Error('tunneling socket could not be established, ' +
- 'cause=' + cause.message);
- error.code = 'ECONNRESET';
- options.request.emit('error', error);
- self.removeSocket(placeholder);
- }
-};
-
-TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
- var pos = this.sockets.indexOf(socket)
- if (pos === -1) {
- return;
- }
- this.sockets.splice(pos, 1);
-
- var pending = this.requests.shift();
- if (pending) {
- // If we have pending requests and a socket gets closed a new one
- // needs to be created to take over in the pool for the one that closed.
- this.createSocket(pending, function(socket) {
- pending.request.onSocket(socket);
- });
- }
-};
-
-function createSecureSocket(options, cb) {
- var self = this;
- TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
- var hostHeader = options.request.getHeader('host');
- var tlsOptions = mergeOptions({}, self.options, {
- socket: socket,
- servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
- });
-
- // 0 is dummy port for v0.6
- var secureSocket = tls.connect(0, tlsOptions);
- self.sockets[self.sockets.indexOf(socket)] = secureSocket;
- cb(secureSocket);
- });
-}
-
-
-function toOptions(host, port, localAddress) {
- if (typeof host === 'string') { // since v0.10
- return {
- host: host,
- port: port,
- localAddress: localAddress
- };
- }
- return host; // for v0.11 or later
-}
-
-function mergeOptions(target) {
- for (var i = 1, len = arguments.length; i < len; ++i) {
- var overrides = arguments[i];
- if (typeof overrides === 'object') {
- var keys = Object.keys(overrides);
- for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
- var k = keys[j];
- if (overrides[k] !== undefined) {
- target[k] = overrides[k];
- }
- }
+ /**
+ * Inserts an Int8 value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertInt8(value, offset) {
+ return this._insertNumberValue(Buffer.prototype.writeInt8, 1, value, offset);
}
- }
- return target;
-}
-
-
-var debug;
-if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
- debug = function() {
- var args = Array.prototype.slice.call(arguments);
- if (typeof args[0] === 'string') {
- args[0] = 'TUNNEL: ' + args[0];
- } else {
- args.unshift('TUNNEL:');
+ /**
+ * Writes an Int16BE value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeInt16BE(value, offset) {
+ return this._writeNumberValue(Buffer.prototype.writeInt16BE, 2, value, offset);
}
- console.error.apply(console, args);
- }
-} else {
- debug = function() {};
-}
-exports.debug = debug; // for test
-
-
-/***/ }),
-/* 142 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-module.exports = which
-which.sync = whichSync
-
-var isWindows = process.platform === 'win32' ||
- process.env.OSTYPE === 'cygwin' ||
- process.env.OSTYPE === 'msys'
-
-var path = __webpack_require__(622)
-var COLON = isWindows ? ';' : ':'
-var isexe = __webpack_require__(742)
-
-function getNotFoundError (cmd) {
- var er = new Error('not found: ' + cmd)
- er.code = 'ENOENT'
-
- return er
-}
-
-function getPathInfo (cmd, opt) {
- var colon = opt.colon || COLON
- var pathEnv = opt.path || process.env.PATH || ''
- var pathExt = ['']
-
- pathEnv = pathEnv.split(colon)
-
- var pathExtExe = ''
- if (isWindows) {
- pathEnv.unshift(process.cwd())
- pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM')
- pathExt = pathExtExe.split(colon)
-
-
- // Always test the cmd itself first. isexe will check to make sure
- // it's found in the pathExt set.
- if (cmd.indexOf('.') !== -1 && pathExt[0] !== '')
- pathExt.unshift('')
- }
-
- // If it has a slash, then we don't bother searching the pathenv.
- // just check the file itself, and that's it.
- if (cmd.match(/\//) || isWindows && cmd.match(/\\/))
- pathEnv = ['']
-
- return {
- env: pathEnv,
- ext: pathExt,
- extExe: pathExtExe
- }
-}
-
-function which (cmd, opt, cb) {
- if (typeof opt === 'function') {
- cb = opt
- opt = {}
- }
-
- var info = getPathInfo(cmd, opt)
- var pathEnv = info.env
- var pathExt = info.ext
- var pathExtExe = info.extExe
- var found = []
-
- ;(function F (i, l) {
- if (i === l) {
- if (opt.all && found.length)
- return cb(null, found)
- else
- return cb(getNotFoundError(cmd))
+ /**
+ * Inserts an Int16BE value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertInt16BE(value, offset) {
+ return this._insertNumberValue(Buffer.prototype.writeInt16BE, 2, value, offset);
}
-
- var pathPart = pathEnv[i]
- if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
- pathPart = pathPart.slice(1, -1)
-
- var p = path.join(pathPart, cmd)
- if (!pathPart && (/^\.[\\\/]/).test(cmd)) {
- p = cmd.slice(0, 2) + p
+ /**
+ * Writes an Int16LE value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeInt16LE(value, offset) {
+ return this._writeNumberValue(Buffer.prototype.writeInt16LE, 2, value, offset);
}
- ;(function E (ii, ll) {
- if (ii === ll) return F(i + 1, l)
- var ext = pathExt[ii]
- isexe(p + ext, { pathExt: pathExtExe }, function (er, is) {
- if (!er && is) {
- if (opt.all)
- found.push(p + ext)
- else
- return cb(null, p + ext)
- }
- return E(ii + 1, ll)
- })
- })(0, pathExt.length)
- })(0, pathEnv.length)
-}
-
-function whichSync (cmd, opt) {
- opt = opt || {}
-
- var info = getPathInfo(cmd, opt)
- var pathEnv = info.env
- var pathExt = info.ext
- var pathExtExe = info.extExe
- var found = []
-
- for (var i = 0, l = pathEnv.length; i < l; i ++) {
- var pathPart = pathEnv[i]
- if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
- pathPart = pathPart.slice(1, -1)
-
- var p = path.join(pathPart, cmd)
- if (!pathPart && /^\.[\\\/]/.test(cmd)) {
- p = cmd.slice(0, 2) + p
+ /**
+ * Inserts an Int16LE value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertInt16LE(value, offset) {
+ return this._insertNumberValue(Buffer.prototype.writeInt16LE, 2, value, offset);
}
- for (var j = 0, ll = pathExt.length; j < ll; j ++) {
- var cur = p + pathExt[j]
- var is
- try {
- is = isexe.sync(cur, { pathExt: pathExtExe })
- if (is) {
- if (opt.all)
- found.push(cur)
- else
- return cur
- }
- } catch (ex) {}
+ /**
+ * Writes an Int32BE value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeInt32BE(value, offset) {
+ return this._writeNumberValue(Buffer.prototype.writeInt32BE, 4, value, offset);
}
- }
-
- if (opt.all && found.length)
- return found
-
- if (opt.nothrow)
- return null
-
- throw getNotFoundError(cmd)
-}
-
-
-/***/ }),
-/* 143 */,
-/* 144 */
-/***/ (function(module) {
-
-/*! *****************************************************************************
-Copyright (c) Microsoft Corporation.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
-AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
-***************************************************************************** */
-/* global global, define, System, Reflect, Promise */
-var __extends;
-var __assign;
-var __rest;
-var __decorate;
-var __param;
-var __metadata;
-var __awaiter;
-var __generator;
-var __exportStar;
-var __values;
-var __read;
-var __spread;
-var __spreadArrays;
-var __await;
-var __asyncGenerator;
-var __asyncDelegator;
-var __asyncValues;
-var __makeTemplateObject;
-var __importStar;
-var __importDefault;
-var __classPrivateFieldGet;
-var __classPrivateFieldSet;
-var __createBinding;
-(function (factory) {
- var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {};
- if (typeof define === "function" && define.amd) {
- define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); });
- }
- else if ( true && typeof module.exports === "object") {
- factory(createExporter(root, createExporter(module.exports)));
- }
- else {
- factory(createExporter(root));
- }
- function createExporter(exports, previous) {
- if (exports !== root) {
- if (typeof Object.create === "function") {
- Object.defineProperty(exports, "__esModule", { value: true });
- }
- else {
- exports.__esModule = true;
- }
- }
- return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };
- }
-})
-(function (exporter) {
- var extendStatics = Object.setPrototypeOf ||
- ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
- function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
-
- __extends = function (d, b) {
- extendStatics(d, b);
- function __() { this.constructor = d; }
- d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
- };
-
- __assign = Object.assign || function (t) {
- for (var s, i = 1, n = arguments.length; i < n; i++) {
- s = arguments[i];
- for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
- }
- return t;
- };
-
- __rest = function (s, e) {
- var t = {};
- for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
- t[p] = s[p];
- if (s != null && typeof Object.getOwnPropertySymbols === "function")
- for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
- if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
- t[p[i]] = s[p[i]];
- }
- return t;
- };
-
- __decorate = function (decorators, target, key, desc) {
- var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
- if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
- else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
- return c > 3 && r && Object.defineProperty(target, key, r), r;
- };
-
- __param = function (paramIndex, decorator) {
- return function (target, key) { decorator(target, key, paramIndex); }
- };
-
- __metadata = function (metadataKey, metadataValue) {
- if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue);
- };
-
- __awaiter = function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
- };
-
- __generator = function (thisArg, body) {
- var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
- return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
- function verb(n) { return function (v) { return step([n, v]); }; }
- function step(op) {
- if (f) throw new TypeError("Generator is already executing.");
- while (_) try {
- if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
- if (y = 0, t) op = [op[0] & 2, t.value];
- switch (op[0]) {
- case 0: case 1: t = op; break;
- case 4: _.label++; return { value: op[1], done: false };
- case 5: _.label++; y = op[1]; op = [0]; continue;
- case 7: op = _.ops.pop(); _.trys.pop(); continue;
- default:
- if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
- if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
- if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
- if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
- if (t[2]) _.ops.pop();
- _.trys.pop(); continue;
- }
- op = body.call(thisArg, _);
- } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
- if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
- }
- };
-
- __exportStar = function(m, o) {
- for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);
- };
-
- __createBinding = Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
- }) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
- });
-
- __values = function (o) {
- var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
- if (m) return m.call(o);
- if (o && typeof o.length === "number") return {
- next: function () {
- if (o && i >= o.length) o = void 0;
- return { value: o && o[i++], done: !o };
- }
- };
- throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
- };
-
- __read = function (o, n) {
- var m = typeof Symbol === "function" && o[Symbol.iterator];
- if (!m) return o;
- var i = m.call(o), r, ar = [], e;
- try {
- while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
- }
- catch (error) { e = { error: error }; }
- finally {
- try {
- if (r && !r.done && (m = i["return"])) m.call(i);
- }
- finally { if (e) throw e.error; }
- }
- return ar;
- };
-
- __spread = function () {
- for (var ar = [], i = 0; i < arguments.length; i++)
- ar = ar.concat(__read(arguments[i]));
- return ar;
- };
-
- __spreadArrays = function () {
- for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;
- for (var r = Array(s), k = 0, i = 0; i < il; i++)
- for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)
- r[k] = a[j];
- return r;
- };
-
- __await = function (v) {
- return this instanceof __await ? (this.v = v, this) : new __await(v);
- };
-
- __asyncGenerator = function (thisArg, _arguments, generator) {
- if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
- var g = generator.apply(thisArg, _arguments || []), i, q = [];
- return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
- function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
- function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
- function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
- function fulfill(value) { resume("next", value); }
- function reject(value) { resume("throw", value); }
- function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
- };
-
- __asyncDelegator = function (o) {
- var i, p;
- return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i;
- function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; }
- };
-
- __asyncValues = function (o) {
- if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
- var m = o[Symbol.asyncIterator], i;
- return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
- function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
- function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
- };
-
- __makeTemplateObject = function (cooked, raw) {
- if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; }
- return cooked;
- };
-
- var __setModuleDefault = Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
- }) : function(o, v) {
- o["default"] = v;
- };
-
- __importStar = function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
- };
-
- __importDefault = function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
- };
-
- __classPrivateFieldGet = function (receiver, privateMap) {
- if (!privateMap.has(receiver)) {
- throw new TypeError("attempted to get private field on non-instance");
- }
- return privateMap.get(receiver);
- };
-
- __classPrivateFieldSet = function (receiver, privateMap, value) {
- if (!privateMap.has(receiver)) {
- throw new TypeError("attempted to set private field on non-instance");
- }
- privateMap.set(receiver, value);
- return value;
- };
-
- exporter("__extends", __extends);
- exporter("__assign", __assign);
- exporter("__rest", __rest);
- exporter("__decorate", __decorate);
- exporter("__param", __param);
- exporter("__metadata", __metadata);
- exporter("__awaiter", __awaiter);
- exporter("__generator", __generator);
- exporter("__exportStar", __exportStar);
- exporter("__createBinding", __createBinding);
- exporter("__values", __values);
- exporter("__read", __read);
- exporter("__spread", __spread);
- exporter("__spreadArrays", __spreadArrays);
- exporter("__await", __await);
- exporter("__asyncGenerator", __asyncGenerator);
- exporter("__asyncDelegator", __asyncDelegator);
- exporter("__asyncValues", __asyncValues);
- exporter("__makeTemplateObject", __makeTemplateObject);
- exporter("__importStar", __importStar);
- exporter("__importDefault", __importDefault);
- exporter("__classPrivateFieldGet", __classPrivateFieldGet);
- exporter("__classPrivateFieldSet", __classPrivateFieldSet);
-});
-
-
-/***/ }),
-/* 145 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-const pump = __webpack_require__(284);
-const bufferStream = __webpack_require__(927);
-
-class MaxBufferError extends Error {
- constructor() {
- super('maxBuffer exceeded');
- this.name = 'MaxBufferError';
- }
-}
-
-function getStream(inputStream, options) {
- if (!inputStream) {
- return Promise.reject(new Error('Expected a stream'));
- }
-
- options = Object.assign({maxBuffer: Infinity}, options);
-
- const {maxBuffer} = options;
-
- let stream;
- return new Promise((resolve, reject) => {
- const rejectPromise = error => {
- if (error) { // A null check
- error.bufferedData = stream.getBufferedValue();
- }
- reject(error);
- };
-
- stream = pump(inputStream, bufferStream(options), error => {
- if (error) {
- rejectPromise(error);
- return;
- }
-
- resolve();
- });
-
- stream.on('data', () => {
- if (stream.getBufferedLength() > maxBuffer) {
- rejectPromise(new MaxBufferError());
- }
- });
- }).then(() => stream.getBufferedValue());
-}
-
-module.exports = getStream;
-module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'}));
-module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true}));
-module.exports.MaxBufferError = MaxBufferError;
-
-
-/***/ }),
-/* 146 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-/**
- * refer:
- * * @atimb "Real keep-alive HTTP agent": https://gist.github.com/2963672
- * * https://github.com/joyent/node/blob/master/lib/http.js
- * * https://github.com/joyent/node/blob/master/lib/https.js
- * * https://github.com/joyent/node/blob/master/lib/_http_agent.js
- */
-
-
-
-const OriginalAgent = __webpack_require__(998).Agent;
-const ms = __webpack_require__(337);
-
-class Agent extends OriginalAgent {
- constructor(options) {
- options = options || {};
- options.keepAlive = options.keepAlive !== false;
- // default is keep-alive and 15s free socket timeout
- if (options.freeSocketKeepAliveTimeout === undefined) {
- options.freeSocketKeepAliveTimeout = 15000;
+ /**
+ * Inserts an Int32BE value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertInt32BE(value, offset) {
+ return this._insertNumberValue(Buffer.prototype.writeInt32BE, 4, value, offset);
}
- // Legacy API: keepAliveTimeout should be rename to `freeSocketKeepAliveTimeout`
- if (options.keepAliveTimeout) {
- options.freeSocketKeepAliveTimeout = options.keepAliveTimeout;
+ /**
+ * Writes an Int32LE value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeInt32LE(value, offset) {
+ return this._writeNumberValue(Buffer.prototype.writeInt32LE, 4, value, offset);
}
- options.freeSocketKeepAliveTimeout = ms(options.freeSocketKeepAliveTimeout);
-
- // Sets the socket to timeout after timeout milliseconds of inactivity on the socket.
- // By default is double free socket keepalive timeout.
- if (options.timeout === undefined) {
- options.timeout = options.freeSocketKeepAliveTimeout * 2;
- // make sure socket default inactivity timeout >= 30s
- if (options.timeout < 30000) {
- options.timeout = 30000;
- }
+ /**
+ * Inserts an Int32LE value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertInt32LE(value, offset) {
+ return this._insertNumberValue(Buffer.prototype.writeInt32LE, 4, value, offset);
}
- options.timeout = ms(options.timeout);
-
- super(options);
-
- this.createSocketCount = 0;
- this.createSocketCountLastCheck = 0;
-
- this.createSocketErrorCount = 0;
- this.createSocketErrorCountLastCheck = 0;
-
- this.closeSocketCount = 0;
- this.closeSocketCountLastCheck = 0;
-
- // socket error event count
- this.errorSocketCount = 0;
- this.errorSocketCountLastCheck = 0;
-
- this.requestCount = 0;
- this.requestCountLastCheck = 0;
-
- this.timeoutSocketCount = 0;
- this.timeoutSocketCountLastCheck = 0;
-
- this.on('free', s => {
- this.requestCount++;
- // last enter free queue timestamp
- s.lastFreeTime = Date.now();
- });
- this.on('timeout', () => {
- this.timeoutSocketCount++;
- });
- this.on('close', () => {
- this.closeSocketCount++;
- });
- this.on('error', () => {
- this.errorSocketCount++;
- });
- }
-
- createSocket(req, options, cb) {
- super.createSocket(req, options, (err, socket) => {
- if (err) {
- this.createSocketErrorCount++;
- return cb(err);
- }
- if (this.keepAlive) {
- // Disable Nagle's algorithm: http://blog.caustik.com/2012/04/08/scaling-node-js-to-100k-concurrent-connections/
- // https://fengmk2.com/benchmark/nagle-algorithm-delayed-ack-mock.html
- socket.setNoDelay(true);
- }
- this.createSocketCount++;
- cb(null, socket);
- });
- }
-
- get statusChanged() {
- const changed = this.createSocketCount !== this.createSocketCountLastCheck ||
- this.createSocketErrorCount !== this.createSocketErrorCountLastCheck ||
- this.closeSocketCount !== this.closeSocketCountLastCheck ||
- this.errorSocketCount !== this.errorSocketCountLastCheck ||
- this.timeoutSocketCount !== this.timeoutSocketCountLastCheck ||
- this.requestCount !== this.requestCountLastCheck;
- if (changed) {
- this.createSocketCountLastCheck = this.createSocketCount;
- this.createSocketErrorCountLastCheck = this.createSocketErrorCount;
- this.closeSocketCountLastCheck = this.closeSocketCount;
- this.errorSocketCountLastCheck = this.errorSocketCount;
- this.timeoutSocketCountLastCheck = this.timeoutSocketCount;
- this.requestCountLastCheck = this.requestCount;
+ /**
+ * Writes a BigInt64BE value to the current write position (or at optional offset).
+ *
+ * @param value { BigInt } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeBigInt64BE(value, offset) {
+ utils_1.bigIntAndBufferInt64Check('writeBigInt64BE');
+ return this._writeNumberValue(Buffer.prototype.writeBigInt64BE, 8, value, offset);
}
- return changed;
- }
-
- getCurrentStatus() {
- return {
- createSocketCount: this.createSocketCount,
- createSocketErrorCount: this.createSocketErrorCount,
- closeSocketCount: this.closeSocketCount,
- errorSocketCount: this.errorSocketCount,
- timeoutSocketCount: this.timeoutSocketCount,
- requestCount: this.requestCount,
- freeSockets: inspect(this.freeSockets),
- sockets: inspect(this.sockets),
- requests: inspect(this.requests),
- };
- }
-}
-
-module.exports = Agent;
-
-function inspect(obj) {
- const res = {};
- for (const key in obj) {
- res[key] = obj[key].length;
- }
- return res;
-}
-
-
-/***/ }),
-/* 147 */
-/***/ (function(__unusedmodule, exports) {
-
-"use strict";
-
-
-exports.fromCallback = function (fn) {
- return Object.defineProperty(function () {
- if (typeof arguments[arguments.length - 1] === 'function') fn.apply(this, arguments)
- else {
- return new Promise((resolve, reject) => {
- arguments[arguments.length] = (err, res) => {
- if (err) return reject(err)
- resolve(res)
- }
- arguments.length++
- fn.apply(this, arguments)
- })
+ /**
+ * Inserts a BigInt64BE value at the given offset value.
+ *
+ * @param value { BigInt } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertBigInt64BE(value, offset) {
+ utils_1.bigIntAndBufferInt64Check('writeBigInt64BE');
+ return this._insertNumberValue(Buffer.prototype.writeBigInt64BE, 8, value, offset);
}
- }, 'name', { value: fn.name })
-}
-
-exports.fromPromise = function (fn) {
- return Object.defineProperty(function () {
- const cb = arguments[arguments.length - 1]
- if (typeof cb !== 'function') return fn.apply(this, arguments)
- else fn.apply(this, arguments).then(r => cb(null, r), cb)
- }, 'name', { value: fn.name })
-}
-
-
-/***/ }),
-/* 148 */,
-/* 149 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-module.exports =
- function(Promise, PromiseArray, debug) {
-var PromiseInspection = Promise.PromiseInspection;
-var util = __webpack_require__(248);
-
-function SettledPromiseArray(values) {
- this.constructor$(values);
-}
-util.inherits(SettledPromiseArray, PromiseArray);
-
-SettledPromiseArray.prototype._promiseResolved = function (index, inspection) {
- this._values[index] = inspection;
- var totalResolved = ++this._totalResolved;
- if (totalResolved >= this._length) {
- this._resolve(this._values);
- return true;
+ /**
+ * Writes a BigInt64LE value to the current write position (or at optional offset).
+ *
+ * @param value { BigInt } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeBigInt64LE(value, offset) {
+ utils_1.bigIntAndBufferInt64Check('writeBigInt64LE');
+ return this._writeNumberValue(Buffer.prototype.writeBigInt64LE, 8, value, offset);
}
- return false;
-};
-
-SettledPromiseArray.prototype._promiseFulfilled = function (value, index) {
- var ret = new PromiseInspection();
- ret._bitField = 33554432;
- ret._settledValueField = value;
- return this._promiseResolved(index, ret);
-};
-SettledPromiseArray.prototype._promiseRejected = function (reason, index) {
- var ret = new PromiseInspection();
- ret._bitField = 16777216;
- ret._settledValueField = reason;
- return this._promiseResolved(index, ret);
-};
-
-Promise.settle = function (promises) {
- debug.deprecated(".settle()", ".reflect()");
- return new SettledPromiseArray(promises).promise();
-};
-
-Promise.allSettled = function (promises) {
- return new SettledPromiseArray(promises).promise();
-};
-
-Promise.prototype.settle = function () {
- return Promise.settle(this);
-};
-};
-
-
-/***/ }),
-/* 150 */,
-/* 151 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.NOOP_TRACER = exports.NoopTracer = void 0;
-var NoopSpan_1 = __webpack_require__(767);
-/**
- * No-op implementations of {@link Tracer}.
- */
-var NoopTracer = /** @class */ (function () {
- function NoopTracer() {
+ /**
+ * Inserts a Int64LE value at the given offset value.
+ *
+ * @param value { BigInt } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertBigInt64LE(value, offset) {
+ utils_1.bigIntAndBufferInt64Check('writeBigInt64LE');
+ return this._insertNumberValue(Buffer.prototype.writeBigInt64LE, 8, value, offset);
}
- NoopTracer.prototype.getCurrentSpan = function () {
- return NoopSpan_1.NOOP_SPAN;
- };
- // startSpan starts a noop span.
- NoopTracer.prototype.startSpan = function (name, options) {
- return NoopSpan_1.NOOP_SPAN;
- };
- NoopTracer.prototype.withSpan = function (span, fn) {
- return fn();
- };
- NoopTracer.prototype.bind = function (target, span) {
- return target;
- };
- return NoopTracer;
-}());
-exports.NoopTracer = NoopTracer;
-exports.NOOP_TRACER = new NoopTracer();
-//# sourceMappingURL=NoopTracer.js.map
-
-/***/ }),
-/* 152 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-var Stream = __webpack_require__(794).Stream;
-var util = __webpack_require__(669);
-
-module.exports = DelayedStream;
-function DelayedStream() {
- this.source = null;
- this.dataSize = 0;
- this.maxDataSize = 1024 * 1024;
- this.pauseStream = true;
-
- this._maxDataSizeExceeded = false;
- this._released = false;
- this._bufferedEvents = [];
-}
-util.inherits(DelayedStream, Stream);
-
-DelayedStream.create = function(source, options) {
- var delayedStream = new this();
-
- options = options || {};
- for (var option in options) {
- delayedStream[option] = options[option];
- }
-
- delayedStream.source = source;
-
- var realEmit = source.emit;
- source.emit = function() {
- delayedStream._handleEmit(arguments);
- return realEmit.apply(source, arguments);
- };
-
- source.on('error', function() {});
- if (delayedStream.pauseStream) {
- source.pause();
- }
-
- return delayedStream;
-};
-
-Object.defineProperty(DelayedStream.prototype, 'readable', {
- configurable: true,
- enumerable: true,
- get: function() {
- return this.source.readable;
- }
-});
-
-DelayedStream.prototype.setEncoding = function() {
- return this.source.setEncoding.apply(this.source, arguments);
-};
-
-DelayedStream.prototype.resume = function() {
- if (!this._released) {
- this.release();
- }
-
- this.source.resume();
-};
-
-DelayedStream.prototype.pause = function() {
- this.source.pause();
-};
-
-DelayedStream.prototype.release = function() {
- this._released = true;
-
- this._bufferedEvents.forEach(function(args) {
- this.emit.apply(this, args);
- }.bind(this));
- this._bufferedEvents = [];
-};
-
-DelayedStream.prototype.pipe = function() {
- var r = Stream.prototype.pipe.apply(this, arguments);
- this.resume();
- return r;
-};
-
-DelayedStream.prototype._handleEmit = function(args) {
- if (this._released) {
- this.emit.apply(this, args);
- return;
- }
-
- if (args[0] === 'data') {
- this.dataSize += args[1].length;
- this._checkIfMaxDataSizeExceeded();
- }
-
- this._bufferedEvents.push(args);
-};
-
-DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() {
- if (this._maxDataSizeExceeded) {
- return;
- }
-
- if (this.dataSize <= this.maxDataSize) {
- return;
- }
-
- this._maxDataSizeExceeded = true;
- var message =
- 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'
- this.emit('error', new Error(message));
-};
-
-
-/***/ }),
-/* 153 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-var core = __webpack_require__(391);
-
-module.exports = function isCore(x) {
- return Object.prototype.hasOwnProperty.call(core, x);
-};
-
-
-/***/ }),
-/* 154 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-
-const figgyPudding = __webpack_require__(122)
-const index = __webpack_require__(407)
-const memo = __webpack_require__(521)
-const write = __webpack_require__(186)
-const to = __webpack_require__(371).to
-
-const PutOpts = figgyPudding({
- algorithms: {
- default: ['sha512']
- },
- integrity: {},
- memoize: {},
- metadata: {},
- pickAlgorithm: {},
- size: {},
- tmpPrefix: {},
- single: {},
- sep: {},
- error: {},
- strict: {}
-})
-
-module.exports = putData
-function putData (cache, key, data, opts) {
- opts = PutOpts(opts)
- return write(cache, data, opts).then(res => {
- return index.insert(
- cache, key, res.integrity, opts.concat({ size: res.size })
- ).then(entry => {
- if (opts.memoize) {
- memo.put(cache, entry, data, opts)
- }
- return res.integrity
- })
- })
-}
-
-module.exports.stream = putStream
-function putStream (cache, key, opts) {
- opts = PutOpts(opts)
- let integrity
- let size
- const contentStream = write.stream(
- cache, opts
- ).on('integrity', int => {
- integrity = int
- }).on('size', s => {
- size = s
- })
- let memoData
- let memoTotal = 0
- const stream = to((chunk, enc, cb) => {
- contentStream.write(chunk, enc, () => {
- if (opts.memoize) {
- if (!memoData) { memoData = [] }
- memoData.push(chunk)
- memoTotal += chunk.length
- }
- cb()
- })
- }, cb => {
- contentStream.end(() => {
- index.insert(cache, key, integrity, opts.concat({ size })).then(entry => {
- if (opts.memoize) {
- memo.put(cache, entry, Buffer.concat(memoData, memoTotal), opts)
- }
- stream.emit('integrity', integrity)
- cb()
- })
- })
- })
- let erred = false
- stream.once('error', err => {
- if (erred) { return }
- erred = true
- contentStream.emit('error', err)
- })
- contentStream.once('error', err => {
- if (erred) { return }
- erred = true
- stream.emit('error', err)
- })
- return stream
-}
-
-
-/***/ }),
-/* 155 */
-/***/ (function(module) {
-
-module.exports = extractDescription
-
-// Extracts description from contents of a readme file in markdown format
-function extractDescription (d) {
- if (!d) return;
- if (d === "ERROR: No README data found!") return;
- // the first block of text before the first heading
- // that isn't the first line heading
- d = d.trim().split('\n')
- for (var s = 0; d[s] && d[s].trim().match(/^(#|$)/); s ++);
- var l = d.length
- for (var e = s + 1; e < l && d[e].trim(); e ++);
- return d.slice(s, e).join(' ').trim()
-}
-
-
-/***/ }),
-/* 156 */,
-/* 157 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-var async = __webpack_require__(751)
- , abort = __webpack_require__(566)
- ;
-
-// API
-module.exports = iterate;
-
-/**
- * Iterates over each job object
- *
- * @param {array|object} list - array or object (named list) to iterate over
- * @param {function} iterator - iterator to run
- * @param {object} state - current job status
- * @param {function} callback - invoked when all elements processed
- */
-function iterate(list, iterator, state, callback)
-{
- // store current index
- var key = state['keyedList'] ? state['keyedList'][state.index] : state.index;
-
- state.jobs[key] = runJob(iterator, key, list[key], function(error, output)
- {
- // don't repeat yourself
- // skip secondary callbacks
- if (!(key in state.jobs))
- {
- return;
+ // Unsigned Integers
+ /**
+ * Reads an UInt8 value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { Number }
+ */
+ readUInt8(offset) {
+ return this._readNumberValue(Buffer.prototype.readUInt8, 1, offset);
}
-
- // clean up jobs
- delete state.jobs[key];
-
- if (error)
- {
- // don't process rest of the results
- // stop still active jobs
- // and reset the list
- abort(state);
+ /**
+ * Reads an UInt16BE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { Number }
+ */
+ readUInt16BE(offset) {
+ return this._readNumberValue(Buffer.prototype.readUInt16BE, 2, offset);
}
- else
- {
- state.results[key] = output;
+ /**
+ * Reads an UInt16LE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { Number }
+ */
+ readUInt16LE(offset) {
+ return this._readNumberValue(Buffer.prototype.readUInt16LE, 2, offset);
}
-
- // return salvaged results
- callback(error, state.results);
- });
-}
-
-/**
- * Runs iterator over provided job element
- *
- * @param {function} iterator - iterator to invoke
- * @param {string|number} key - key/index of the element in the list of jobs
- * @param {mixed} item - job description
- * @param {function} callback - invoked after iterator is done with the job
- * @returns {function|mixed} - job abort function or something else
- */
-function runJob(iterator, key, item, callback)
-{
- var aborter;
-
- // allow shortcut if iterator expects only two arguments
- if (iterator.length == 2)
- {
- aborter = iterator(item, async(callback));
- }
- // otherwise go with full three arguments
- else
- {
- aborter = iterator(item, key, async(callback));
- }
-
- return aborter;
-}
-
-
-/***/ }),
-/* 158 */
-/***/ (function(__unusedmodule, exports) {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", { value: true });
-//# sourceMappingURL=Time.js.map
-
-/***/ }),
-/* 159 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-
-// tar -c
-const hlo = __webpack_require__(891)
-
-const Pack = __webpack_require__(415)
-const fs = __webpack_require__(747)
-const fsm = __webpack_require__(827)
-const t = __webpack_require__(579)
-const path = __webpack_require__(622)
-
-const c = module.exports = (opt_, files, cb) => {
- if (typeof files === 'function')
- cb = files
-
- if (Array.isArray(opt_))
- files = opt_, opt_ = {}
-
- if (!files || !Array.isArray(files) || !files.length)
- throw new TypeError('no files or directories specified')
-
- files = Array.from(files)
-
- const opt = hlo(opt_)
-
- if (opt.sync && typeof cb === 'function')
- throw new TypeError('callback not supported for sync tar functions')
-
- if (!opt.file && typeof cb === 'function')
- throw new TypeError('callback only supported with file option')
-
- return opt.file && opt.sync ? createFileSync(opt, files)
- : opt.file ? createFile(opt, files, cb)
- : opt.sync ? createSync(opt, files)
- : create(opt, files)
-}
-
-const createFileSync = (opt, files) => {
- const p = new Pack.Sync(opt)
- const stream = new fsm.WriteStreamSync(opt.file, {
- mode: opt.mode || 0o666
- })
- p.pipe(stream)
- addFilesSync(p, files)
-}
-
-const createFile = (opt, files, cb) => {
- const p = new Pack(opt)
- const stream = new fsm.WriteStream(opt.file, {
- mode: opt.mode || 0o666
- })
- p.pipe(stream)
-
- const promise = new Promise((res, rej) => {
- stream.on('error', rej)
- stream.on('close', res)
- p.on('error', rej)
- })
-
- addFilesAsync(p, files)
-
- return cb ? promise.then(cb, cb) : promise
-}
-
-const addFilesSync = (p, files) => {
- files.forEach(file => {
- if (file.charAt(0) === '@')
- t({
- file: path.resolve(p.cwd, file.substr(1)),
- sync: true,
- noResume: true,
- onentry: entry => p.add(entry)
- })
- else
- p.add(file)
- })
- p.end()
-}
-
-const addFilesAsync = (p, files) => {
- while (files.length) {
- const file = files.shift()
- if (file.charAt(0) === '@')
- return t({
- file: path.resolve(p.cwd, file.substr(1)),
- noResume: true,
- onentry: entry => p.add(entry)
- }).then(_ => addFilesAsync(p, files))
- else
- p.add(file)
- }
- p.end()
-}
-
-const createSync = (opt, files) => {
- const p = new Pack.Sync(opt)
- addFilesSync(p, files)
- return p
-}
-
-const create = (opt, files) => {
- const p = new Pack(opt)
- addFilesAsync(p, files)
- return p
-}
-
-
-/***/ }),
-/* 160 */,
-/* 161 */
-/***/ (function(module, exports, __webpack_require__) {
-
-/**
- * Module dependencies.
- */
-
-var tty = __webpack_require__(867);
-var util = __webpack_require__(669);
-
-/**
- * This is the Node.js implementation of `debug()`.
- *
- * Expose `debug()` as the module.
- */
-
-exports = module.exports = __webpack_require__(778);
-exports.init = init;
-exports.log = log;
-exports.formatArgs = formatArgs;
-exports.save = save;
-exports.load = load;
-exports.useColors = useColors;
-
-/**
- * Colors.
- */
-
-exports.colors = [ 6, 2, 3, 4, 5, 1 ];
-
-try {
- var supportsColor = __webpack_require__(247);
- if (supportsColor && supportsColor.level >= 2) {
- exports.colors = [
- 20, 21, 26, 27, 32, 33, 38, 39, 40, 41, 42, 43, 44, 45, 56, 57, 62, 63, 68,
- 69, 74, 75, 76, 77, 78, 79, 80, 81, 92, 93, 98, 99, 112, 113, 128, 129, 134,
- 135, 148, 149, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
- 172, 173, 178, 179, 184, 185, 196, 197, 198, 199, 200, 201, 202, 203, 204,
- 205, 206, 207, 208, 209, 214, 215, 220, 221
- ];
- }
-} catch (err) {
- // swallow - we only care if `supports-color` is available; it doesn't have to be.
-}
-
-/**
- * Build up the default `inspectOpts` object from the environment variables.
- *
- * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
- */
-
-exports.inspectOpts = Object.keys(process.env).filter(function (key) {
- return /^debug_/i.test(key);
-}).reduce(function (obj, key) {
- // camel-case
- var prop = key
- .substring(6)
- .toLowerCase()
- .replace(/_([a-z])/g, function (_, k) { return k.toUpperCase() });
-
- // coerce string value into JS value
- var val = process.env[key];
- if (/^(yes|on|true|enabled)$/i.test(val)) val = true;
- else if (/^(no|off|false|disabled)$/i.test(val)) val = false;
- else if (val === 'null') val = null;
- else val = Number(val);
-
- obj[prop] = val;
- return obj;
-}, {});
-
-/**
- * Is stdout a TTY? Colored output is enabled when `true`.
- */
-
-function useColors() {
- return 'colors' in exports.inspectOpts
- ? Boolean(exports.inspectOpts.colors)
- : tty.isatty(process.stderr.fd);
-}
-
-/**
- * Map %o to `util.inspect()`, all on a single line.
- */
-
-exports.formatters.o = function(v) {
- this.inspectOpts.colors = this.useColors;
- return util.inspect(v, this.inspectOpts)
- .split('\n').map(function(str) {
- return str.trim()
- }).join(' ');
-};
-
-/**
- * Map %o to `util.inspect()`, allowing multiple lines if needed.
- */
-
-exports.formatters.O = function(v) {
- this.inspectOpts.colors = this.useColors;
- return util.inspect(v, this.inspectOpts);
-};
-
-/**
- * Adds ANSI color escape codes if enabled.
- *
- * @api public
- */
-
-function formatArgs(args) {
- var name = this.namespace;
- var useColors = this.useColors;
-
- if (useColors) {
- var c = this.color;
- var colorCode = '\u001b[3' + (c < 8 ? c : '8;5;' + c);
- var prefix = ' ' + colorCode + ';1m' + name + ' ' + '\u001b[0m';
-
- args[0] = prefix + args[0].split('\n').join('\n' + prefix);
- args.push(colorCode + 'm+' + exports.humanize(this.diff) + '\u001b[0m');
- } else {
- args[0] = getDate() + name + ' ' + args[0];
- }
-}
-
-function getDate() {
- if (exports.inspectOpts.hideDate) {
- return '';
- } else {
- return new Date().toISOString() + ' ';
- }
-}
-
-/**
- * Invokes `util.format()` with the specified arguments and writes to stderr.
- */
-
-function log() {
- return process.stderr.write(util.format.apply(util, arguments) + '\n');
-}
-
-/**
- * Save `namespaces`.
- *
- * @param {String} namespaces
- * @api private
- */
-
-function save(namespaces) {
- if (null == namespaces) {
- // If you set a process.env field to null or undefined, it gets cast to the
- // string 'null' or 'undefined'. Just delete instead.
- delete process.env.DEBUG;
- } else {
- process.env.DEBUG = namespaces;
- }
-}
-
-/**
- * Load `namespaces`.
- *
- * @return {String} returns the previously persisted debug modes
- * @api private
- */
-
-function load() {
- return process.env.DEBUG;
-}
-
-/**
- * Init logic for `debug` instances.
- *
- * Create a new `inspectOpts` object in case `useColors` is set
- * differently for a particular `debug` instance.
- */
-
-function init (debug) {
- debug.inspectOpts = {};
-
- var keys = Object.keys(exports.inspectOpts);
- for (var i = 0; i < keys.length; i++) {
- debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
- }
-}
-
-/**
- * Enable namespaces listed in `process.env.DEBUG` initially.
- */
-
-exports.enable(load());
-
-
-/***/ }),
-/* 162 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-var Buffer = __webpack_require__(215).Buffer;
-
-// Export Node.js internal encodings.
-
-module.exports = {
- // Encodings
- utf8: { type: "_internal", bomAware: true},
- cesu8: { type: "_internal", bomAware: true},
- unicode11utf8: "utf8",
-
- ucs2: { type: "_internal", bomAware: true},
- utf16le: "ucs2",
-
- binary: { type: "_internal" },
- base64: { type: "_internal" },
- hex: { type: "_internal" },
-
- // Codec.
- _internal: InternalCodec,
-};
-
-//------------------------------------------------------------------------------
-
-function InternalCodec(codecOptions, iconv) {
- this.enc = codecOptions.encodingName;
- this.bomAware = codecOptions.bomAware;
-
- if (this.enc === "base64")
- this.encoder = InternalEncoderBase64;
- else if (this.enc === "cesu8") {
- this.enc = "utf8"; // Use utf8 for decoding.
- this.encoder = InternalEncoderCesu8;
-
- // Add decoder for versions of Node not supporting CESU-8
- if (Buffer.from('eda0bdedb2a9', 'hex').toString() !== '💩') {
- this.decoder = InternalDecoderCesu8;
- this.defaultCharUnicode = iconv.defaultCharUnicode;
- }
+ /**
+ * Reads an UInt32BE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { Number }
+ */
+ readUInt32BE(offset) {
+ return this._readNumberValue(Buffer.prototype.readUInt32BE, 4, offset);
}
-}
-
-InternalCodec.prototype.encoder = InternalEncoder;
-InternalCodec.prototype.decoder = InternalDecoder;
-
-//------------------------------------------------------------------------------
-
-// We use node.js internal decoder. Its signature is the same as ours.
-var StringDecoder = __webpack_require__(304).StringDecoder;
-
-if (!StringDecoder.prototype.end) // Node v0.8 doesn't have this method.
- StringDecoder.prototype.end = function() {};
-
-
-function InternalDecoder(options, codec) {
- this.decoder = new StringDecoder(codec.enc);
-}
-
-InternalDecoder.prototype.write = function(buf) {
- if (!Buffer.isBuffer(buf)) {
- buf = Buffer.from(buf);
+ /**
+ * Reads an UInt32LE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { Number }
+ */
+ readUInt32LE(offset) {
+ return this._readNumberValue(Buffer.prototype.readUInt32LE, 4, offset);
}
-
- return this.decoder.write(buf);
-}
-
-InternalDecoder.prototype.end = function() {
- return this.decoder.end();
-}
-
-
-//------------------------------------------------------------------------------
-// Encoder is mostly trivial
-
-function InternalEncoder(options, codec) {
- this.enc = codec.enc;
-}
-
-InternalEncoder.prototype.write = function(str) {
- return Buffer.from(str, this.enc);
-}
-
-InternalEncoder.prototype.end = function() {
-}
-
-
-//------------------------------------------------------------------------------
-// Except base64 encoder, which must keep its state.
-
-function InternalEncoderBase64(options, codec) {
- this.prevStr = '';
-}
-
-InternalEncoderBase64.prototype.write = function(str) {
- str = this.prevStr + str;
- var completeQuads = str.length - (str.length % 4);
- this.prevStr = str.slice(completeQuads);
- str = str.slice(0, completeQuads);
-
- return Buffer.from(str, "base64");
-}
-
-InternalEncoderBase64.prototype.end = function() {
- return Buffer.from(this.prevStr, "base64");
-}
-
-
-//------------------------------------------------------------------------------
-// CESU-8 encoder is also special.
-
-function InternalEncoderCesu8(options, codec) {
-}
-
-InternalEncoderCesu8.prototype.write = function(str) {
- var buf = Buffer.alloc(str.length * 3), bufIdx = 0;
- for (var i = 0; i < str.length; i++) {
- var charCode = str.charCodeAt(i);
- // Naive implementation, but it works because CESU-8 is especially easy
- // to convert from UTF-16 (which all JS strings are encoded in).
- if (charCode < 0x80)
- buf[bufIdx++] = charCode;
- else if (charCode < 0x800) {
- buf[bufIdx++] = 0xC0 + (charCode >>> 6);
- buf[bufIdx++] = 0x80 + (charCode & 0x3f);
- }
- else { // charCode will always be < 0x10000 in javascript.
- buf[bufIdx++] = 0xE0 + (charCode >>> 12);
- buf[bufIdx++] = 0x80 + ((charCode >>> 6) & 0x3f);
- buf[bufIdx++] = 0x80 + (charCode & 0x3f);
- }
+ /**
+ * Reads a BigUInt64BE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { BigInt }
+ */
+ readBigUInt64BE(offset) {
+ utils_1.bigIntAndBufferInt64Check('readBigUInt64BE');
+ return this._readNumberValue(Buffer.prototype.readBigUInt64BE, 8, offset);
}
- return buf.slice(0, bufIdx);
-}
-
-InternalEncoderCesu8.prototype.end = function() {
-}
-
-//------------------------------------------------------------------------------
-// CESU-8 decoder is not implemented in Node v4.0+
-
-function InternalDecoderCesu8(options, codec) {
- this.acc = 0;
- this.contBytes = 0;
- this.accBytes = 0;
- this.defaultCharUnicode = codec.defaultCharUnicode;
-}
-
-InternalDecoderCesu8.prototype.write = function(buf) {
- var acc = this.acc, contBytes = this.contBytes, accBytes = this.accBytes,
- res = '';
- for (var i = 0; i < buf.length; i++) {
- var curByte = buf[i];
- if ((curByte & 0xC0) !== 0x80) { // Leading byte
- if (contBytes > 0) { // Previous code is invalid
- res += this.defaultCharUnicode;
- contBytes = 0;
- }
-
- if (curByte < 0x80) { // Single-byte code
- res += String.fromCharCode(curByte);
- } else if (curByte < 0xE0) { // Two-byte code
- acc = curByte & 0x1F;
- contBytes = 1; accBytes = 1;
- } else if (curByte < 0xF0) { // Three-byte code
- acc = curByte & 0x0F;
- contBytes = 2; accBytes = 1;
- } else { // Four or more are not supported for CESU-8.
- res += this.defaultCharUnicode;
- }
- } else { // Continuation byte
- if (contBytes > 0) { // We're waiting for it.
- acc = (acc << 6) | (curByte & 0x3f);
- contBytes--; accBytes++;
- if (contBytes === 0) {
- // Check for overlong encoding, but support Modified UTF-8 (encoding NULL as C0 80)
- if (accBytes === 2 && acc < 0x80 && acc > 0)
- res += this.defaultCharUnicode;
- else if (accBytes === 3 && acc < 0x800)
- res += this.defaultCharUnicode;
- else
- // Actually add character.
- res += String.fromCharCode(acc);
- }
- } else { // Unexpected continuation byte
- res += this.defaultCharUnicode;
- }
- }
+ /**
+ * Reads a BigUInt64LE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { BigInt }
+ */
+ readBigUInt64LE(offset) {
+ utils_1.bigIntAndBufferInt64Check('readBigUInt64LE');
+ return this._readNumberValue(Buffer.prototype.readBigUInt64LE, 8, offset);
}
- this.acc = acc; this.contBytes = contBytes; this.accBytes = accBytes;
- return res;
-}
-
-InternalDecoderCesu8.prototype.end = function() {
- var res = 0;
- if (this.contBytes > 0)
- res += this.defaultCharUnicode;
- return res;
-}
-
-
-/***/ }),
-/* 163 */,
-/* 164 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-var once = __webpack_require__(49)
-var eos = __webpack_require__(3)
-var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes
-
-var noop = function () {}
-var ancient = /^v?\.0/.test(process.version)
-
-var isFn = function (fn) {
- return typeof fn === 'function'
-}
-
-var isFS = function (stream) {
- if (!ancient) return false // newer node version do not need to care about fs is a special way
- if (!fs) return false // browser
- return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close)
-}
-
-var isRequest = function (stream) {
- return stream.setHeader && isFn(stream.abort)
-}
-
-var destroyer = function (stream, reading, writing, callback) {
- callback = once(callback)
-
- var closed = false
- stream.on('close', function () {
- closed = true
- })
-
- eos(stream, {readable: reading, writable: writing}, function (err) {
- if (err) return callback(err)
- closed = true
- callback()
- })
-
- var destroyed = false
- return function (err) {
- if (closed) return
- if (destroyed) return
- destroyed = true
-
- if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks
- if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want
-
- if (isFn(stream.destroy)) return stream.destroy()
-
- callback(err || new Error('stream was destroyed'))
- }
-}
-
-var call = function (fn) {
- fn()
-}
-
-var pipe = function (from, to) {
- return from.pipe(to)
-}
-
-var pump = function () {
- var streams = Array.prototype.slice.call(arguments)
- var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop
-
- if (Array.isArray(streams[0])) streams = streams[0]
- if (streams.length < 2) throw new Error('pump requires two streams per minimum')
-
- var error
- var destroys = streams.map(function (stream, i) {
- var reading = i < streams.length - 1
- var writing = i > 0
- return destroyer(stream, reading, writing, function (err) {
- if (!error) error = err
- if (err) destroys.forEach(call)
- if (reading) return
- destroys.forEach(call)
- callback(error)
- })
- })
-
- streams.reduce(pipe)
-}
-
-module.exports = pump
-
-
-/***/ }),
-/* 165 */
-/***/ (function(__unusedmodule, exports) {
-
-"use strict";
-
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-//# sourceMappingURL=Plugin.js.map
-
-/***/ }),
-/* 166 */,
-/* 167 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-module.exports = normalize
-
-var fixer = __webpack_require__(615)
-normalize.fixer = fixer
-
-var makeWarning = __webpack_require__(362)
-
-var fieldsToFix = ['name','version','description','repository','modules','scripts'
- ,'files','bin','man','bugs','keywords','readme','homepage','license']
-var otherThingsToFix = ['dependencies','people', 'typos']
-
-var thingsToFix = fieldsToFix.map(function(fieldName) {
- return ucFirst(fieldName) + "Field"
-})
-// two ways to do this in CoffeeScript on only one line, sub-70 chars:
-// thingsToFix = fieldsToFix.map (name) -> ucFirst(name) + "Field"
-// thingsToFix = (ucFirst(name) + "Field" for name in fieldsToFix)
-thingsToFix = thingsToFix.concat(otherThingsToFix)
-
-function normalize (data, warn, strict) {
- if(warn === true) warn = null, strict = true
- if(!strict) strict = false
- if(!warn || data.private) warn = function(msg) { /* noop */ }
-
- if (data.scripts &&
- data.scripts.install === "node-gyp rebuild" &&
- !data.scripts.preinstall) {
- data.gypfile = true
- }
- fixer.warn = function() { warn(makeWarning.apply(null, arguments)) }
- thingsToFix.forEach(function(thingName) {
- fixer["fix" + ucFirst(thingName)](data, strict)
- })
- data._id = data.name + "@" + data.version
-}
-
-function ucFirst (string) {
- return string.charAt(0).toUpperCase() + string.slice(1);
-}
-
-
-/***/ }),
-/* 168 */,
-/* 169 */
-/***/ (function(module) {
-
-"use strict";
-
-
-/*
- * Role
- *
- * A Role encapsulates a particular object's 'role' in a method's
- * dispatch. They are added directly to the selector for a method, and thus
- * do not prevent the objects a method was defined on from being garbage
- * collected.
- */
-module.exports = Role
-function Role (method, position) {
- this.method = method
- this.position = position
-}
-
-Role.roleKeyName = Symbol('roles')
-
-
-/***/ }),
-/* 170 */,
-/* 171 */,
-/* 172 */,
-/* 173 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-
-Object.defineProperty(exports, "__esModule", {
- value: true
-});
-exports.default = void 0;
-
-var _rng = _interopRequireDefault(__webpack_require__(944));
-
-var _stringify = _interopRequireDefault(__webpack_require__(855));
-
-function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
-
-// **`v1()` - Generate time-based UUID**
-//
-// Inspired by https://github.com/LiosK/UUID.js
-// and http://docs.python.org/library/uuid.html
-let _nodeId;
-
-let _clockseq; // Previous uuid creation time
-
-
-let _lastMSecs = 0;
-let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
-
-function v1(options, buf, offset) {
- let i = buf && offset || 0;
- const b = buf || new Array(16);
- options = options || {};
- let node = options.node || _nodeId;
- let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
- // specified. We do this lazily to minimize issues related to insufficient
- // system entropy. See #189
-
- if (node == null || clockseq == null) {
- const seedBytes = options.random || (options.rng || _rng.default)();
-
- if (node == null) {
- // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
- node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
+ /**
+ * Writes an UInt8 value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeUInt8(value, offset) {
+ return this._writeNumberValue(Buffer.prototype.writeUInt8, 1, value, offset);
}
-
- if (clockseq == null) {
- // Per 4.2.2, randomize (14 bit) clockseq
- clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
+ /**
+ * Inserts an UInt8 value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertUInt8(value, offset) {
+ return this._insertNumberValue(Buffer.prototype.writeUInt8, 1, value, offset);
}
- } // UUID timestamps are 100 nano-second units since the Gregorian epoch,
- // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
- // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
- // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
-
-
- let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
- // cycle to simulate higher resolution clock
-
- let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)
-
- const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression
-
- if (dt < 0 && options.clockseq === undefined) {
- clockseq = clockseq + 1 & 0x3fff;
- } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
- // time interval
-
-
- if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
- nsecs = 0;
- } // Per 4.2.1.2 Throw error if too many uuids are requested
-
-
- if (nsecs >= 10000) {
- throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
- }
-
- _lastMSecs = msecs;
- _lastNSecs = nsecs;
- _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
-
- msecs += 12219292800000; // `time_low`
-
- const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
- b[i++] = tl >>> 24 & 0xff;
- b[i++] = tl >>> 16 & 0xff;
- b[i++] = tl >>> 8 & 0xff;
- b[i++] = tl & 0xff; // `time_mid`
-
- const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
- b[i++] = tmh >>> 8 & 0xff;
- b[i++] = tmh & 0xff; // `time_high_and_version`
-
- b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
-
- b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
-
- b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`
-
- b[i++] = clockseq & 0xff; // `node`
-
- for (let n = 0; n < 6; ++n) {
- b[i + n] = node[n];
- }
-
- return buf || (0, _stringify.default)(b);
-}
-
-var _default = v1;
-exports.default = _default;
-
-/***/ }),
-/* 174 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-var util = __webpack_require__(669)
-var TrackerBase = __webpack_require__(187)
-var Tracker = __webpack_require__(623)
-var TrackerStream = __webpack_require__(235)
-
-var TrackerGroup = module.exports = function (name) {
- TrackerBase.call(this, name)
- this.parentGroup = null
- this.trackers = []
- this.completion = {}
- this.weight = {}
- this.totalWeight = 0
- this.finished = false
- this.bubbleChange = bubbleChange(this)
-}
-util.inherits(TrackerGroup, TrackerBase)
-
-function bubbleChange (trackerGroup) {
- return function (name, completed, tracker) {
- trackerGroup.completion[tracker.id] = completed
- if (trackerGroup.finished) return
- trackerGroup.emit('change', name || trackerGroup.name, trackerGroup.completed(), trackerGroup)
- }
-}
-
-TrackerGroup.prototype.nameInTree = function () {
- var names = []
- var from = this
- while (from) {
- names.unshift(from.name)
- from = from.parentGroup
- }
- return names.join('/')
-}
-
-TrackerGroup.prototype.addUnit = function (unit, weight) {
- if (unit.addUnit) {
- var toTest = this
- while (toTest) {
- if (unit === toTest) {
- throw new Error(
- 'Attempted to add tracker group ' +
- unit.name + ' to tree that already includes it ' +
- this.nameInTree(this))
- }
- toTest = toTest.parentGroup
+ /**
+ * Writes an UInt16BE value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeUInt16BE(value, offset) {
+ return this._writeNumberValue(Buffer.prototype.writeUInt16BE, 2, value, offset);
}
- unit.parentGroup = this
- }
- this.weight[unit.id] = weight || 1
- this.totalWeight += this.weight[unit.id]
- this.trackers.push(unit)
- this.completion[unit.id] = unit.completed()
- unit.on('change', this.bubbleChange)
- if (!this.finished) this.emit('change', unit.name, this.completion[unit.id], unit)
- return unit
-}
-
-TrackerGroup.prototype.completed = function () {
- if (this.trackers.length === 0) return 0
- var valPerWeight = 1 / this.totalWeight
- var completed = 0
- for (var ii = 0; ii < this.trackers.length; ii++) {
- var trackerId = this.trackers[ii].id
- completed += valPerWeight * this.weight[trackerId] * this.completion[trackerId]
- }
- return completed
-}
-
-TrackerGroup.prototype.newGroup = function (name, weight) {
- return this.addUnit(new TrackerGroup(name), weight)
-}
-
-TrackerGroup.prototype.newItem = function (name, todo, weight) {
- return this.addUnit(new Tracker(name, todo), weight)
-}
-
-TrackerGroup.prototype.newStream = function (name, todo, weight) {
- return this.addUnit(new TrackerStream(name, todo), weight)
-}
-
-TrackerGroup.prototype.finish = function () {
- this.finished = true
- if (!this.trackers.length) this.addUnit(new Tracker(), 1, true)
- for (var ii = 0; ii < this.trackers.length; ii++) {
- var tracker = this.trackers[ii]
- tracker.finish()
- tracker.removeListener('change', this.bubbleChange)
- }
- this.emit('change', this.name, 1, this)
-}
-
-var buffer = ' '
-TrackerGroup.prototype.debug = function (depth) {
- depth = depth || 0
- var indent = depth ? buffer.substr(0, depth) : ''
- var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n'
- this.trackers.forEach(function (tracker) {
- if (tracker instanceof TrackerGroup) {
- output += tracker.debug(depth + 1)
- } else {
- output += indent + ' ' + tracker.name + ': ' + tracker.completed() + '\n'
+ /**
+ * Inserts an UInt16BE value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertUInt16BE(value, offset) {
+ return this._insertNumberValue(Buffer.prototype.writeUInt16BE, 2, value, offset);
}
- })
- return output
-}
-
-
-/***/ }),
-/* 175 */
-/***/ (function(module) {
-
-"use strict";
-
-
-module.exports = function spin (spinstr, spun) {
- return spinstr[spun % spinstr.length]
-}
-
-
-/***/ }),
-/* 176 */,
-/* 177 */,
-/* 178 */
-/***/ (function(module) {
-
-module.exports = ["389-exception","Autoconf-exception-2.0","Autoconf-exception-3.0","Bison-exception-2.2","Bootloader-exception","Classpath-exception-2.0","CLISP-exception-2.0","DigiRule-FOSS-exception","eCos-exception-2.0","Fawkes-Runtime-exception","FLTK-exception","Font-exception-2.0","freertos-exception-2.0","GCC-exception-2.0","GCC-exception-3.1","gnu-javamail-exception","GPL-3.0-linking-exception","GPL-3.0-linking-source-exception","GPL-CC-1.0","i2p-gpl-java-exception","Libtool-exception","Linux-syscall-note","LLVM-exception","LZMA-exception","mif-exception","Nokia-Qt-exception-1.1","OCaml-LGPL-linking-exception","OCCT-exception-1.0","OpenJDK-assembly-exception-1.0","openvpn-openssl-exception","PS-or-PDF-font-exception-20170817","Qt-GPL-exception-1.0","Qt-LGPL-exception-1.1","Qwt-exception-1.0","Swift-exception","u-boot-exception-2.0","Universal-FOSS-exception-1.0","WxWindows-exception-3.1"];
-
-/***/ }),
-/* 179 */,
-/* 180 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.run = void 0;
-const core_1 = __webpack_require__(470);
-const expo_1 = __webpack_require__(265);
-const install_1 = __webpack_require__(655);
-const system_1 = __webpack_require__(913);
-function run() {
- return __awaiter(this, void 0, void 0, function* () {
- const config = {
- version: core_1.getInput('expo-version') || 'latest',
- packager: core_1.getInput('expo-packager') || 'yarn',
- cache: (core_1.getInput('expo-cache') || 'false') === 'true',
- cacheKey: core_1.getInput('expo-cache-key') || undefined,
- };
- const path = yield core_1.group(config.cache
- ? `Installing Expo CLI from cache or with ${config.packager}`
- : `Installing Expo CLI with ${config.packager}`, () => install_1.install(config));
- core_1.addPath(path);
- yield core_1.group('Checking current authenticated account', () => expo_1.authenticate({
- token: core_1.getInput('expo-token') || undefined,
- username: core_1.getInput('expo-username') || undefined,
- password: core_1.getInput('expo-password') || undefined,
- }));
- const shouldPatchWatchers = core_1.getInput('expo-patch-watchers') || 'true';
- if (shouldPatchWatchers !== 'false') {
- yield core_1.group('Patching system watchers for the `ENOSPC` error', () => system_1.patchWatchers());
- }
- });
-}
-exports.run = run;
-
-
-/***/ }),
-/* 181 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-
-const BB = __webpack_require__(489)
-
-const cacache = __webpack_require__(426)
-const cacheKey = __webpack_require__(279)
-const Fetcher = __webpack_require__(404)
-const git = __webpack_require__(729)
-const mkdirp = BB.promisify(__webpack_require__(626))
-const pickManifest = __webpack_require__(55)
-const optCheck = __webpack_require__(420)
-const osenv = __webpack_require__(580)
-const packDir = __webpack_require__(249)
-const PassThrough = __webpack_require__(794).PassThrough
-const path = __webpack_require__(622)
-const pipe = BB.promisify(__webpack_require__(371).pipe)
-const rimraf = BB.promisify(__webpack_require__(985))
-const uniqueFilename = __webpack_require__(94)
-
-// `git` dependencies are fetched from git repositories and packed up.
-const fetchGit = module.exports = Object.create(null)
-
-Fetcher.impl(fetchGit, {
- packument (spec, opts) {
- return BB.reject(new Error('Not implemented yet.'))
- },
-
- manifest (spec, opts) {
- opts = optCheck(opts)
- if (spec.hosted && spec.hosted.getDefaultRepresentation() === 'shortcut') {
- return hostedManifest(spec, opts)
- } else {
- // If it's not a shortcut, don't do fallbacks.
- return plainManifest(spec.fetchSpec, spec, opts)
+ /**
+ * Writes an UInt16LE value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeUInt16LE(value, offset) {
+ return this._writeNumberValue(Buffer.prototype.writeUInt16LE, 2, value, offset);
}
- },
-
- tarball (spec, opts) {
- opts = optCheck(opts)
- const stream = new PassThrough()
- this.manifest(spec, opts).then(manifest => {
- stream.emit('manifest', manifest)
- return pipe(
- this.fromManifest(
- manifest, spec, opts
- ).on('integrity', i => stream.emit('integrity', i)), stream
- )
- }).catch(err => stream.emit('error', err))
- return stream
- },
-
- fromManifest (manifest, spec, opts) {
- opts = optCheck(opts)
- let streamError
- const stream = new PassThrough().on('error', e => { streamError = e })
- const cacheName = manifest._uniqueResolved || manifest._resolved || ''
- const cacheStream = (
- opts.cache &&
- cacache.get.stream(
- opts.cache, cacheKey('packed-dir', cacheName), opts
- ).on('integrity', i => stream.emit('integrity', i))
- )
- cacheStream.pipe(stream)
- cacheStream.on('error', err => {
- if (err.code !== 'ENOENT') {
- return stream.emit('error', err)
- } else {
- stream.emit('reset')
- return withTmp(opts, tmp => {
- if (streamError) { throw streamError }
- return cloneRepo(
- spec, manifest._repo, manifest._ref, manifest._rawRef, tmp, opts
- ).then(HEAD => {
- if (streamError) { throw streamError }
- manifest._resolved = spec.saveSpec.replace(/(:?#.*)?$/, `#${HEAD}`)
- manifest._uniqueResolved = manifest._resolved
- return packDir(manifest, manifest._uniqueResolved, tmp, stream, opts)
- })
- }).catch(err => stream.emit('error', err))
- }
- })
- return stream
- }
-})
-
-function hostedManifest (spec, opts) {
- return BB.resolve(null).then(() => {
- if (!spec.hosted.git()) {
- throw new Error(`No git url for ${spec}`)
+ /**
+ * Inserts an UInt16LE value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertUInt16LE(value, offset) {
+ return this._insertNumberValue(Buffer.prototype.writeUInt16LE, 2, value, offset);
}
- return plainManifest(spec.hosted.git(), spec, opts)
- }).catch(err => {
- if (!spec.hosted.https()) {
- throw err
+ /**
+ * Writes an UInt32BE value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeUInt32BE(value, offset) {
+ return this._writeNumberValue(Buffer.prototype.writeUInt32BE, 4, value, offset);
}
- return plainManifest(spec.hosted.https(), spec, opts)
- }).catch(err => {
- if (!spec.hosted.sshurl()) {
- throw err
+ /**
+ * Inserts an UInt32BE value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertUInt32BE(value, offset) {
+ return this._insertNumberValue(Buffer.prototype.writeUInt32BE, 4, value, offset);
}
- return plainManifest(spec.hosted.sshurl(), spec, opts)
- })
-}
-
-function plainManifest (repo, spec, opts) {
- const rawRef = spec.gitCommittish || spec.gitRange
- return resolve(
- repo, spec, spec.name, opts
- ).then(ref => {
- if (ref) {
- const resolved = spec.saveSpec.replace(/(?:#.*)?$/, `#${ref.sha}`)
- return {
- _repo: repo,
- _resolved: resolved,
- _spec: spec,
- _ref: ref,
- _rawRef: spec.gitCommittish || spec.gitRange,
- _uniqueResolved: resolved,
- _integrity: false,
- _shasum: false
- }
- } else {
- // We're SOL and need a full clone :(
- //
- // If we're confident enough that `rawRef` is a commit SHA,
- // then we can at least get `finalize-manifest` to cache its result.
- const resolved = spec.saveSpec.replace(/(?:#.*)?$/, rawRef ? `#${rawRef}` : '')
- return {
- _repo: repo,
- _rawRef: rawRef,
- _resolved: rawRef && rawRef.match(/^[a-f0-9]{40}$/) && resolved,
- _uniqueResolved: rawRef && rawRef.match(/^[a-f0-9]{40}$/) && resolved,
- _integrity: false,
- _shasum: false
- }
+ /**
+ * Writes an UInt32LE value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeUInt32LE(value, offset) {
+ return this._writeNumberValue(Buffer.prototype.writeUInt32LE, 4, value, offset);
}
- })
-}
-
-function resolve (url, spec, name, opts) {
- const isSemver = !!spec.gitRange
- return git.revs(url, opts).then(remoteRefs => {
- return isSemver
- ? pickManifest({
- versions: remoteRefs.versions,
- 'dist-tags': remoteRefs['dist-tags'],
- name: name
- }, spec.gitRange, opts)
- : remoteRefs
- ? BB.resolve(
- remoteRefs.refs[spec.gitCommittish] || remoteRefs.refs[remoteRefs.shas[spec.gitCommittish]]
- )
- : null
- })
-}
-
-function withTmp (opts, cb) {
- if (opts.cache) {
- // cacache has a special facility for working in a tmp dir
- return cacache.tmp.withTmp(opts.cache, { tmpPrefix: 'git-clone' }, cb)
- } else {
- const tmpDir = path.join(osenv.tmpdir(), 'pacote-git-tmp')
- const tmpName = uniqueFilename(tmpDir, 'git-clone')
- const tmp = mkdirp(tmpName).then(() => tmpName).disposer(rimraf)
- return BB.using(tmp, cb)
- }
-}
-
-// Only certain whitelisted hosted gits support shadow cloning
-const SHALLOW_HOSTS = new Set(['github', 'gist', 'gitlab', 'bitbucket'])
-function cloneRepo (spec, repo, resolvedRef, rawRef, tmp, opts) {
- const ref = resolvedRef ? resolvedRef.ref : rawRef
- if (resolvedRef && spec.hosted && SHALLOW_HOSTS.has(spec.hosted.type)) {
- return git.shallow(repo, ref, tmp, opts)
- } else {
- return git.clone(repo, ref, tmp, opts)
- }
-}
-
-
-/***/ }),
-/* 182 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-module.exports = writeFile
-module.exports.sync = writeFileSync
-module.exports._getTmpname = getTmpname // for testing
-module.exports._cleanupOnExit = cleanupOnExit
-
-var fs = __webpack_require__(598)
-var MurmurHash3 = __webpack_require__(188)
-var onExit = __webpack_require__(497)
-var path = __webpack_require__(622)
-var activeFiles = {}
-
-// if we run inside of a worker_thread, `process.pid` is not unique
-/* istanbul ignore next */
-var threadId = (function getId () {
- try {
- var workerThreads = __webpack_require__(13)
-
- /// if we are in main thread, this is set to `0`
- return workerThreads.threadId
- } catch (e) {
- // worker_threads are not available, fallback to 0
- return 0
- }
-})()
-
-var invocations = 0
-function getTmpname (filename) {
- return filename + '.' +
- MurmurHash3(__filename)
- .hash(String(process.pid))
- .hash(String(threadId))
- .hash(String(++invocations))
- .result()
-}
-
-function cleanupOnExit (tmpfile) {
- return function () {
- try {
- fs.unlinkSync(typeof tmpfile === 'function' ? tmpfile() : tmpfile)
- } catch (_) {}
- }
-}
-
-function writeFile (filename, data, options, callback) {
- if (options) {
- if (options instanceof Function) {
- callback = options
- options = {}
- } else if (typeof options === 'string') {
- options = { encoding: options }
+ /**
+ * Inserts an UInt32LE value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertUInt32LE(value, offset) {
+ return this._insertNumberValue(Buffer.prototype.writeUInt32LE, 4, value, offset);
}
- } else {
- options = {}
- }
-
- var Promise = options.Promise || global.Promise
- var truename
- var fd
- var tmpfile
- /* istanbul ignore next -- The closure only gets called when onExit triggers */
- var removeOnExitHandler = onExit(cleanupOnExit(() => tmpfile))
- var absoluteName = path.resolve(filename)
-
- new Promise(function serializeSameFile (resolve) {
- // make a queue if it doesn't already exist
- if (!activeFiles[absoluteName]) activeFiles[absoluteName] = []
-
- activeFiles[absoluteName].push(resolve) // add this job to the queue
- if (activeFiles[absoluteName].length === 1) resolve() // kick off the first one
- }).then(function getRealPath () {
- return new Promise(function (resolve) {
- fs.realpath(filename, function (_, realname) {
- truename = realname || filename
- tmpfile = getTmpname(truename)
- resolve()
- })
- })
- }).then(function stat () {
- return new Promise(function stat (resolve) {
- if (options.mode && options.chown) resolve()
- else {
- // Either mode or chown is not explicitly set
- // Default behavior is to copy it from original file
- fs.stat(truename, function (err, stats) {
- if (err || !stats) resolve()
- else {
- options = Object.assign({}, options)
-
- if (options.mode == null) {
- options.mode = stats.mode
- }
- if (options.chown == null && process.getuid) {
- options.chown = { uid: stats.uid, gid: stats.gid }
- }
- resolve()
- }
- })
- }
- })
- }).then(function thenWriteFile () {
- return new Promise(function (resolve, reject) {
- fs.open(tmpfile, 'w', options.mode, function (err, _fd) {
- fd = _fd
- if (err) reject(err)
- else resolve()
- })
- })
- }).then(function write () {
- return new Promise(function (resolve, reject) {
- if (Buffer.isBuffer(data)) {
- fs.write(fd, data, 0, data.length, 0, function (err) {
- if (err) reject(err)
- else resolve()
- })
- } else if (data != null) {
- fs.write(fd, String(data), 0, String(options.encoding || 'utf8'), function (err) {
- if (err) reject(err)
- else resolve()
- })
- } else resolve()
- })
- }).then(function syncAndClose () {
- return new Promise(function (resolve, reject) {
- if (options.fsync !== false) {
- fs.fsync(fd, function (err) {
- if (err) fs.close(fd, () => reject(err))
- else fs.close(fd, resolve)
- })
- } else {
- fs.close(fd, resolve)
- }
- })
- }).then(function chown () {
- fd = null
- if (options.chown) {
- return new Promise(function (resolve, reject) {
- fs.chown(tmpfile, options.chown.uid, options.chown.gid, function (err) {
- if (err) reject(err)
- else resolve()
- })
- })
+ /**
+ * Writes a BigUInt64BE value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeBigUInt64BE(value, offset) {
+ utils_1.bigIntAndBufferInt64Check('writeBigUInt64BE');
+ return this._writeNumberValue(Buffer.prototype.writeBigUInt64BE, 8, value, offset);
}
- }).then(function chmod () {
- if (options.mode) {
- return new Promise(function (resolve, reject) {
- fs.chmod(tmpfile, options.mode, function (err) {
- if (err) reject(err)
- else resolve()
- })
- })
+ /**
+ * Inserts a BigUInt64BE value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertBigUInt64BE(value, offset) {
+ utils_1.bigIntAndBufferInt64Check('writeBigUInt64BE');
+ return this._insertNumberValue(Buffer.prototype.writeBigUInt64BE, 8, value, offset);
}
- }).then(function rename () {
- return new Promise(function (resolve, reject) {
- fs.rename(tmpfile, truename, function (err) {
- if (err) reject(err)
- else resolve()
- })
- })
- }).then(function success () {
- removeOnExitHandler()
- callback()
- }, function fail (err) {
- return new Promise(resolve => {
- return fd ? fs.close(fd, resolve) : resolve()
- }).then(() => {
- removeOnExitHandler()
- fs.unlink(tmpfile, function () {
- callback(err)
- })
- })
- }).then(function checkQueue () {
- activeFiles[absoluteName].shift() // remove the element added by serializeSameFile
- if (activeFiles[absoluteName].length > 0) {
- activeFiles[absoluteName][0]() // start next job if one is pending
- } else delete activeFiles[absoluteName]
- })
-}
-
-function writeFileSync (filename, data, options) {
- if (typeof options === 'string') options = { encoding: options }
- else if (!options) options = {}
- try {
- filename = fs.realpathSync(filename)
- } catch (ex) {
- // it's ok, it'll happen on a not yet existing file
- }
- var tmpfile = getTmpname(filename)
-
- if (!options.mode || !options.chown) {
- // Either mode or chown is not explicitly set
- // Default behavior is to copy it from original file
- try {
- var stats = fs.statSync(filename)
- options = Object.assign({}, options)
- if (!options.mode) {
- options.mode = stats.mode
- }
- if (!options.chown && process.getuid) {
- options.chown = { uid: stats.uid, gid: stats.gid }
- }
- } catch (ex) {
- // ignore stat errors
+ /**
+ * Writes a BigUInt64LE value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeBigUInt64LE(value, offset) {
+ utils_1.bigIntAndBufferInt64Check('writeBigUInt64LE');
+ return this._writeNumberValue(Buffer.prototype.writeBigUInt64LE, 8, value, offset);
}
- }
-
- var fd
- var cleanup = cleanupOnExit(tmpfile)
- var removeOnExitHandler = onExit(cleanup)
-
- try {
- fd = fs.openSync(tmpfile, 'w', options.mode)
- if (Buffer.isBuffer(data)) {
- fs.writeSync(fd, data, 0, data.length, 0)
- } else if (data != null) {
- fs.writeSync(fd, String(data), 0, String(options.encoding || 'utf8'))
+ /**
+ * Inserts a BigUInt64LE value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertBigUInt64LE(value, offset) {
+ utils_1.bigIntAndBufferInt64Check('writeBigUInt64LE');
+ return this._insertNumberValue(Buffer.prototype.writeBigUInt64LE, 8, value, offset);
}
- if (options.fsync !== false) {
- fs.fsyncSync(fd)
+ // Floating Point
+ /**
+ * Reads an FloatBE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { Number }
+ */
+ readFloatBE(offset) {
+ return this._readNumberValue(Buffer.prototype.readFloatBE, 4, offset);
}
- fs.closeSync(fd)
- if (options.chown) fs.chownSync(tmpfile, options.chown.uid, options.chown.gid)
- if (options.mode) fs.chmodSync(tmpfile, options.mode)
- fs.renameSync(tmpfile, filename)
- removeOnExitHandler()
- } catch (err) {
- if (fd) {
- try {
- fs.closeSync(fd)
- } catch (ex) {
- // ignore close errors at this stage, error may have closed fd already.
- }
+ /**
+ * Reads an FloatLE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { Number }
+ */
+ readFloatLE(offset) {
+ return this._readNumberValue(Buffer.prototype.readFloatLE, 4, offset);
}
- removeOnExitHandler()
- cleanup()
- throw err
- }
-}
-
-
-/***/ }),
-/* 183 */
-/***/ (function(module) {
-
-function RetryOperation(timeouts, options) {
- // Compatibility for the old (timeouts, retryForever) signature
- if (typeof options === 'boolean') {
- options = { forever: options };
- }
-
- this._timeouts = timeouts;
- this._options = options || {};
- this._fn = null;
- this._errors = [];
- this._attempts = 1;
- this._operationTimeout = null;
- this._operationTimeoutCb = null;
- this._timeout = null;
-
- if (this._options.forever) {
- this._cachedTimeouts = this._timeouts.slice(0);
- }
-}
-module.exports = RetryOperation;
-
-RetryOperation.prototype.stop = function() {
- if (this._timeout) {
- clearTimeout(this._timeout);
- }
-
- this._timeouts = [];
- this._cachedTimeouts = null;
-};
-
-RetryOperation.prototype.retry = function(err) {
- if (this._timeout) {
- clearTimeout(this._timeout);
- }
-
- if (!err) {
- return false;
- }
-
- this._errors.push(err);
-
- var timeout = this._timeouts.shift();
- if (timeout === undefined) {
- if (this._cachedTimeouts) {
- // retry forever, only keep last error
- this._errors.splice(this._errors.length - 1, this._errors.length);
- this._timeouts = this._cachedTimeouts.slice(0);
- timeout = this._timeouts.shift();
- } else {
- return false;
+ /**
+ * Writes a FloatBE value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeFloatBE(value, offset) {
+ return this._writeNumberValue(Buffer.prototype.writeFloatBE, 4, value, offset);
}
- }
-
- var self = this;
- var timer = setTimeout(function() {
- self._attempts++;
-
- if (self._operationTimeoutCb) {
- self._timeout = setTimeout(function() {
- self._operationTimeoutCb(self._attempts);
- }, self._operationTimeout);
-
- if (this._options.unref) {
- self._timeout.unref();
- }
+ /**
+ * Inserts a FloatBE value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertFloatBE(value, offset) {
+ return this._insertNumberValue(Buffer.prototype.writeFloatBE, 4, value, offset);
}
-
- self._fn(self._attempts);
- }, timeout);
-
- if (this._options.unref) {
- timer.unref();
- }
-
- return true;
-};
-
-RetryOperation.prototype.attempt = function(fn, timeoutOps) {
- this._fn = fn;
-
- if (timeoutOps) {
- if (timeoutOps.timeout) {
- this._operationTimeout = timeoutOps.timeout;
+ /**
+ * Writes a FloatLE value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeFloatLE(value, offset) {
+ return this._writeNumberValue(Buffer.prototype.writeFloatLE, 4, value, offset);
}
- if (timeoutOps.cb) {
- this._operationTimeoutCb = timeoutOps.cb;
+ /**
+ * Inserts a FloatLE value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertFloatLE(value, offset) {
+ return this._insertNumberValue(Buffer.prototype.writeFloatLE, 4, value, offset);
}
- }
-
- var self = this;
- if (this._operationTimeoutCb) {
- this._timeout = setTimeout(function() {
- self._operationTimeoutCb();
- }, self._operationTimeout);
- }
-
- this._fn(this._attempts);
-};
-
-RetryOperation.prototype.try = function(fn) {
- console.log('Using RetryOperation.try() is deprecated');
- this.attempt(fn);
-};
-
-RetryOperation.prototype.start = function(fn) {
- console.log('Using RetryOperation.start() is deprecated');
- this.attempt(fn);
-};
-
-RetryOperation.prototype.start = RetryOperation.prototype.try;
-
-RetryOperation.prototype.errors = function() {
- return this._errors;
-};
-
-RetryOperation.prototype.attempts = function() {
- return this._attempts;
-};
-
-RetryOperation.prototype.mainError = function() {
- if (this._errors.length === 0) {
- return null;
- }
-
- var counts = {};
- var mainError = null;
- var mainErrorCount = 0;
-
- for (var i = 0; i < this._errors.length; i++) {
- var error = this._errors[i];
- var message = error.message;
- var count = (counts[message] || 0) + 1;
-
- counts[message] = count;
-
- if (count >= mainErrorCount) {
- mainError = error;
- mainErrorCount = count;
+ // Double Floating Point
+ /**
+ * Reads an DoublEBE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { Number }
+ */
+ readDoubleBE(offset) {
+ return this._readNumberValue(Buffer.prototype.readDoubleBE, 8, offset);
}
- }
-
- return mainError;
-};
-
-
-/***/ }),
-/* 184 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-module.exports = move
-
-var nodeFs = __webpack_require__(747)
-var rimraf = __webpack_require__(993)
-var validate = __webpack_require__(997)
-var copy = __webpack_require__(555)
-var RunQueue = __webpack_require__(399)
-var extend = Object.assign || __webpack_require__(669)._extend
-
-function promisify (Promise, fn) {
- return function () {
- var args = [].slice.call(arguments)
- return new Promise(function (resolve, reject) {
- return fn.apply(null, args.concat(function (err, value) {
- if (err) {
- reject(err)
- } else {
- resolve(value)
- }
- }))
- })
- }
-}
-
-function move (from, to, opts) {
- validate('SSO|SS', arguments)
- opts = extend({}, opts || {})
-
- var Promise = opts.Promise || global.Promise
- var fs = opts.fs || nodeFs
- var rimrafAsync = promisify(Promise, rimraf)
- var renameAsync = promisify(Promise, fs.rename)
-
- opts.top = from
-
- var queue = new RunQueue({
- maxConcurrency: opts.maxConcurrency,
- Promise: Promise
- })
- opts.queue = queue
- opts.recurseWith = rename
-
- queue.add(0, rename, [from, to, opts])
-
- return queue.run().then(function () {
- return remove(from)
- }, function (err) {
- // if the target already exists don't clobber it
- if (err.code === 'EEXIST' || err.code === 'EPERM') {
- return passThroughError()
- } else {
- return remove(to).then(passThroughError, passThroughError)
+ /**
+ * Reads an DoubleLE value from the current read position or an optionally provided offset.
+ *
+ * @param offset { Number } The offset to read data from (optional)
+ * @return { Number }
+ */
+ readDoubleLE(offset) {
+ return this._readNumberValue(Buffer.prototype.readDoubleLE, 8, offset);
}
- function passThroughError () {
- return Promise.reject(err)
+ /**
+ * Writes a DoubleBE value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeDoubleBE(value, offset) {
+ return this._writeNumberValue(Buffer.prototype.writeDoubleBE, 8, value, offset);
}
- })
-
- function remove (target) {
- var opts = {
- unlink: fs.unlink,
- chmod: fs.chmod,
- stat: fs.stat,
- lstat: fs.lstat,
- rmdir: fs.rmdir,
- readdir: fs.readdir,
- glob: false
+ /**
+ * Inserts a DoubleBE value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertDoubleBE(value, offset) {
+ return this._insertNumberValue(Buffer.prototype.writeDoubleBE, 8, value, offset);
}
- return rimrafAsync(target, opts)
- }
-
- function rename (from, to, opts, done) {
- return renameAsync(from, to).catch(function (err) {
- if (err.code !== 'EXDEV') {
- return Promise.reject(err)
- } else {
- return remove(to).then(function () {
- return copy.item(from, to, opts)
- })
- }
- })
- }
-}
-
-
-/***/ }),
-/* 185 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-
-const BB = __webpack_require__(489)
-
-const contentPath = __webpack_require__(969)
-const figgyPudding = __webpack_require__(122)
-const fs = __webpack_require__(598)
-const PassThrough = __webpack_require__(794).PassThrough
-const pipe = BB.promisify(__webpack_require__(371).pipe)
-const ssri = __webpack_require__(951)
-const Y = __webpack_require__(945)
-
-const lstatAsync = BB.promisify(fs.lstat)
-const readFileAsync = BB.promisify(fs.readFile)
-
-const ReadOpts = figgyPudding({
- size: {}
-})
-
-module.exports = read
-function read (cache, integrity, opts) {
- opts = ReadOpts(opts)
- return withContentSri(cache, integrity, (cpath, sri) => {
- return readFileAsync(cpath, null).then(data => {
- if (typeof opts.size === 'number' && opts.size !== data.length) {
- throw sizeError(opts.size, data.length)
- } else if (ssri.checkData(data, sri)) {
- return data
- } else {
- throw integrityError(sri, cpath)
- }
- })
- })
-}
-
-module.exports.sync = readSync
-function readSync (cache, integrity, opts) {
- opts = ReadOpts(opts)
- return withContentSriSync(cache, integrity, (cpath, sri) => {
- const data = fs.readFileSync(cpath)
- if (typeof opts.size === 'number' && opts.size !== data.length) {
- throw sizeError(opts.size, data.length)
- } else if (ssri.checkData(data, sri)) {
- return data
- } else {
- throw integrityError(sri, cpath)
+ /**
+ * Writes a DoubleLE value to the current write position (or at optional offset).
+ *
+ * @param value { Number } The value to write.
+ * @param offset { Number } The offset to write the value at.
+ *
+ * @return this
+ */
+ writeDoubleLE(value, offset) {
+ return this._writeNumberValue(Buffer.prototype.writeDoubleLE, 8, value, offset);
}
- })
-}
-
-module.exports.stream = readStream
-module.exports.readStream = readStream
-function readStream (cache, integrity, opts) {
- opts = ReadOpts(opts)
- const stream = new PassThrough()
- withContentSri(cache, integrity, (cpath, sri) => {
- return lstatAsync(cpath).then(stat => ({ cpath, sri, stat }))
- }).then(({ cpath, sri, stat }) => {
- return pipe(
- fs.createReadStream(cpath),
- ssri.integrityStream({
- integrity: sri,
- size: opts.size
- }),
- stream
- )
- }).catch(err => {
- stream.emit('error', err)
- })
- return stream
-}
-
-let copyFileAsync
-if (fs.copyFile) {
- module.exports.copy = copy
- module.exports.copy.sync = copySync
- copyFileAsync = BB.promisify(fs.copyFile)
-}
-
-function copy (cache, integrity, dest, opts) {
- opts = ReadOpts(opts)
- return withContentSri(cache, integrity, (cpath, sri) => {
- return copyFileAsync(cpath, dest)
- })
-}
-
-function copySync (cache, integrity, dest, opts) {
- opts = ReadOpts(opts)
- return withContentSriSync(cache, integrity, (cpath, sri) => {
- return fs.copyFileSync(cpath, dest)
- })
-}
-
-module.exports.hasContent = hasContent
-function hasContent (cache, integrity) {
- if (!integrity) { return BB.resolve(false) }
- return withContentSri(cache, integrity, (cpath, sri) => {
- return lstatAsync(cpath).then(stat => ({ size: stat.size, sri, stat }))
- }).catch(err => {
- if (err.code === 'ENOENT') { return false }
- if (err.code === 'EPERM') {
- if (process.platform !== 'win32') {
- throw err
- } else {
- return false
- }
+ /**
+ * Inserts a DoubleLE value at the given offset value.
+ *
+ * @param value { Number } The value to insert.
+ * @param offset { Number } The offset to insert the value at.
+ *
+ * @return this
+ */
+ insertDoubleLE(value, offset) {
+ return this._insertNumberValue(Buffer.prototype.writeDoubleLE, 8, value, offset);
}
- })
-}
-
-module.exports.hasContent.sync = hasContentSync
-function hasContentSync (cache, integrity) {
- if (!integrity) { return false }
- return withContentSriSync(cache, integrity, (cpath, sri) => {
- try {
- const stat = fs.lstatSync(cpath)
- return { size: stat.size, sri, stat }
- } catch (err) {
- if (err.code === 'ENOENT') { return false }
- if (err.code === 'EPERM') {
- if (process.platform !== 'win32') {
- throw err
- } else {
- return false
+ // Strings
+ /**
+ * Reads a String from the current read position.
+ *
+ * @param arg1 { Number | String } The number of bytes to read as a String, or the BufferEncoding to use for
+ * the string (Defaults to instance level encoding).
+ * @param encoding { String } The BufferEncoding to use for the string (Defaults to instance level encoding).
+ *
+ * @return { String }
+ */
+ readString(arg1, encoding) {
+ let lengthVal;
+ // Length provided
+ if (typeof arg1 === 'number') {
+ utils_1.checkLengthValue(arg1);
+ lengthVal = Math.min(arg1, this.length - this._readOffset);
}
- }
+ else {
+ encoding = arg1;
+ lengthVal = this.length - this._readOffset;
+ }
+ // Check encoding
+ if (typeof encoding !== 'undefined') {
+ utils_1.checkEncoding(encoding);
+ }
+ const value = this._buff.slice(this._readOffset, this._readOffset + lengthVal).toString(encoding || this._encoding);
+ this._readOffset += lengthVal;
+ return value;
}
- })
-}
-
-function withContentSri (cache, integrity, fn) {
- return BB.try(() => {
- const sri = ssri.parse(integrity)
- // If `integrity` has multiple entries, pick the first digest
- // with available local data.
- const algo = sri.pickAlgorithm()
- const digests = sri[algo]
- if (digests.length <= 1) {
- const cpath = contentPath(cache, digests[0])
- return fn(cpath, digests[0])
- } else {
- return BB.any(sri[sri.pickAlgorithm()].map(meta => {
- return withContentSri(cache, meta, fn)
- }, { concurrency: 1 }))
- .catch(err => {
- if ([].some.call(err, e => e.code === 'ENOENT')) {
- throw Object.assign(
- new Error('No matching content found for ' + sri.toString()),
- { code: 'ENOENT' }
- )
- } else {
- throw err[0]
- }
- })
+ /**
+ * Inserts a String
+ *
+ * @param value { String } The String value to insert.
+ * @param offset { Number } The offset to insert the string at.
+ * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
+ *
+ * @return this
+ */
+ insertString(value, offset, encoding) {
+ utils_1.checkOffsetValue(offset);
+ return this._handleString(value, true, offset, encoding);
}
- })
-}
-
-function withContentSriSync (cache, integrity, fn) {
- const sri = ssri.parse(integrity)
- // If `integrity` has multiple entries, pick the first digest
- // with available local data.
- const algo = sri.pickAlgorithm()
- const digests = sri[algo]
- if (digests.length <= 1) {
- const cpath = contentPath(cache, digests[0])
- return fn(cpath, digests[0])
- } else {
- let lastErr = null
- for (const meta of sri[sri.pickAlgorithm()]) {
- try {
- return withContentSriSync(cache, meta, fn)
- } catch (err) {
- lastErr = err
- }
+ /**
+ * Writes a String
+ *
+ * @param value { String } The String value to write.
+ * @param arg2 { Number | String } The offset to write the string at, or the BufferEncoding to use.
+ * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
+ *
+ * @return this
+ */
+ writeString(value, arg2, encoding) {
+ return this._handleString(value, false, arg2, encoding);
}
- if (lastErr) { throw lastErr }
- }
-}
-
-function sizeError (expected, found) {
- var err = new Error(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
- err.expected = expected
- err.found = found
- err.code = 'EBADSIZE'
- return err
-}
-
-function integrityError (sri, path) {
- var err = new Error(Y`Integrity verification failed for ${sri} (${path})`)
- err.code = 'EINTEGRITY'
- err.sri = sri
- err.path = path
- return err
-}
-
-
-/***/ }),
-/* 186 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-
-const BB = __webpack_require__(489)
-
-const contentPath = __webpack_require__(969)
-const fixOwner = __webpack_require__(133)
-const fs = __webpack_require__(598)
-const moveFile = __webpack_require__(201)
-const PassThrough = __webpack_require__(794).PassThrough
-const path = __webpack_require__(622)
-const pipe = BB.promisify(__webpack_require__(371).pipe)
-const rimraf = BB.promisify(__webpack_require__(503))
-const ssri = __webpack_require__(951)
-const to = __webpack_require__(371).to
-const uniqueFilename = __webpack_require__(94)
-const Y = __webpack_require__(945)
-
-const writeFileAsync = BB.promisify(fs.writeFile)
-
-module.exports = write
-function write (cache, data, opts) {
- opts = opts || {}
- if (opts.algorithms && opts.algorithms.length > 1) {
- throw new Error(
- Y`opts.algorithms only supports a single algorithm for now`
- )
- }
- if (typeof opts.size === 'number' && data.length !== opts.size) {
- return BB.reject(sizeError(opts.size, data.length))
- }
- const sri = ssri.fromData(data, {
- algorithms: opts.algorithms
- })
- if (opts.integrity && !ssri.checkData(data, opts.integrity, opts)) {
- return BB.reject(checksumError(opts.integrity, sri))
- }
- return BB.using(makeTmp(cache, opts), tmp => (
- writeFileAsync(
- tmp.target, data, { flag: 'wx' }
- ).then(() => (
- moveToDestination(tmp, cache, sri, opts)
- ))
- )).then(() => ({ integrity: sri, size: data.length }))
-}
-
-module.exports.stream = writeStream
-function writeStream (cache, opts) {
- opts = opts || {}
- const inputStream = new PassThrough()
- let inputErr = false
- function errCheck () {
- if (inputErr) { throw inputErr }
- }
-
- let allDone
- const ret = to((c, n, cb) => {
- if (!allDone) {
- allDone = handleContent(inputStream, cache, opts, errCheck)
+ /**
+ * Reads a null-terminated String from the current read position.
+ *
+ * @param encoding { String } The BufferEncoding to use for the string (Defaults to instance level encoding).
+ *
+ * @return { String }
+ */
+ readStringNT(encoding) {
+ if (typeof encoding !== 'undefined') {
+ utils_1.checkEncoding(encoding);
+ }
+ // Set null character position to the end SmartBuffer instance.
+ let nullPos = this.length;
+ // Find next null character (if one is not found, default from above is used)
+ for (let i = this._readOffset; i < this.length; i++) {
+ if (this._buff[i] === 0x00) {
+ nullPos = i;
+ break;
+ }
+ }
+ // Read string value
+ const value = this._buff.slice(this._readOffset, nullPos);
+ // Increment internal Buffer read offset
+ this._readOffset = nullPos + 1;
+ return value.toString(encoding || this._encoding);
}
- inputStream.write(c, n, cb)
- }, cb => {
- inputStream.end(() => {
- if (!allDone) {
- const e = new Error(Y`Cache input stream was empty`)
- e.code = 'ENODATA'
- return ret.emit('error', e)
- }
- allDone.then(res => {
- res.integrity && ret.emit('integrity', res.integrity)
- res.size !== null && ret.emit('size', res.size)
- cb()
- }, e => {
- ret.emit('error', e)
- })
- })
- })
- ret.once('error', e => {
- inputErr = e
- })
- return ret
-}
-
-function handleContent (inputStream, cache, opts, errCheck) {
- return BB.using(makeTmp(cache, opts), tmp => {
- errCheck()
- return pipeToTmp(
- inputStream, cache, tmp.target, opts, errCheck
- ).then(res => {
- return moveToDestination(
- tmp, cache, res.integrity, opts, errCheck
- ).then(() => res)
- })
- })
-}
-
-function pipeToTmp (inputStream, cache, tmpTarget, opts, errCheck) {
- return BB.resolve().then(() => {
- let integrity
- let size
- const hashStream = ssri.integrityStream({
- integrity: opts.integrity,
- algorithms: opts.algorithms,
- size: opts.size
- }).on('integrity', s => {
- integrity = s
- }).on('size', s => {
- size = s
- })
- const outStream = fs.createWriteStream(tmpTarget, {
- flags: 'wx'
- })
- errCheck()
- return pipe(inputStream, hashStream, outStream).then(() => {
- return { integrity, size }
- }).catch(err => {
- return rimraf(tmpTarget).then(() => { throw err })
- })
- })
-}
-
-function makeTmp (cache, opts) {
- const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
- return fixOwner.mkdirfix(
- cache, path.dirname(tmpTarget)
- ).then(() => ({
- target: tmpTarget,
- moved: false
- })).disposer(tmp => (!tmp.moved && rimraf(tmp.target)))
-}
-
-function moveToDestination (tmp, cache, sri, opts, errCheck) {
- errCheck && errCheck()
- const destination = contentPath(cache, sri)
- const destDir = path.dirname(destination)
-
- return fixOwner.mkdirfix(
- cache, destDir
- ).then(() => {
- errCheck && errCheck()
- return moveFile(tmp.target, destination)
- }).then(() => {
- errCheck && errCheck()
- tmp.moved = true
- return fixOwner.chownr(cache, destination)
- })
-}
-
-function sizeError (expected, found) {
- var err = new Error(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
- err.expected = expected
- err.found = found
- err.code = 'EBADSIZE'
- return err
-}
-
-function checksumError (expected, found) {
- var err = new Error(Y`Integrity check failed:
- Wanted: ${expected}
- Found: ${found}`)
- err.code = 'EINTEGRITY'
- err.expected = expected
- err.found = found
- return err
-}
-
-
-/***/ }),
-/* 187 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-var EventEmitter = __webpack_require__(614).EventEmitter
-var util = __webpack_require__(669)
-
-var trackerId = 0
-var TrackerBase = module.exports = function (name) {
- EventEmitter.call(this)
- this.id = ++trackerId
- this.name = name
-}
-util.inherits(TrackerBase, EventEmitter)
-
-
-/***/ }),
-/* 188 */
-/***/ (function(module) {
-
-/**
- * @preserve
- * JS Implementation of incremental MurmurHash3 (r150) (as of May 10, 2013)
- *
- * @author Jens Taylor
- * @see http://github.com/homebrewing/brauhaus-diff
- * @author Gary Court
- * @see http://github.com/garycourt/murmurhash-js
- * @author Austin Appleby
- * @see http://sites.google.com/site/murmurhash/
- */
-(function(){
- var cache;
-
- // Call this function without `new` to use the cached object (good for
- // single-threaded environments), or with `new` to create a new object.
- //
- // @param {string} key A UTF-16 or ASCII string
- // @param {number} seed An optional positive integer
- // @return {object} A MurmurHash3 object for incremental hashing
- function MurmurHash3(key, seed) {
- var m = this instanceof MurmurHash3 ? this : cache;
- m.reset(seed)
- if (typeof key === 'string' && key.length > 0) {
- m.hash(key);
- }
-
- if (m !== this) {
- return m;
- }
- };
-
- // Incrementally add a string to this hash
- //
- // @param {string} key A UTF-16 or ASCII string
- // @return {object} this
- MurmurHash3.prototype.hash = function(key) {
- var h1, k1, i, top, len;
-
- len = key.length;
- this.len += len;
-
- k1 = this.k1;
- i = 0;
- switch (this.rem) {
- case 0: k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) : 0;
- case 1: k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) << 8 : 0;
- case 2: k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) << 16 : 0;
- case 3:
- k1 ^= len > i ? (key.charCodeAt(i) & 0xff) << 24 : 0;
- k1 ^= len > i ? (key.charCodeAt(i++) & 0xff00) >> 8 : 0;
- }
-
- this.rem = (len + this.rem) & 3; // & 3 is same as % 4
- len -= this.rem;
- if (len > 0) {
- h1 = this.h1;
- while (1) {
- k1 = (k1 * 0x2d51 + (k1 & 0xffff) * 0xcc9e0000) & 0xffffffff;
- k1 = (k1 << 15) | (k1 >>> 17);
- k1 = (k1 * 0x3593 + (k1 & 0xffff) * 0x1b870000) & 0xffffffff;
-
- h1 ^= k1;
- h1 = (h1 << 13) | (h1 >>> 19);
- h1 = (h1 * 5 + 0xe6546b64) & 0xffffffff;
-
- if (i >= len) {
- break;
- }
-
- k1 = ((key.charCodeAt(i++) & 0xffff)) ^
- ((key.charCodeAt(i++) & 0xffff) << 8) ^
- ((key.charCodeAt(i++) & 0xffff) << 16);
- top = key.charCodeAt(i++);
- k1 ^= ((top & 0xff) << 24) ^
- ((top & 0xff00) >> 8);
- }
-
- k1 = 0;
- switch (this.rem) {
- case 3: k1 ^= (key.charCodeAt(i + 2) & 0xffff) << 16;
- case 2: k1 ^= (key.charCodeAt(i + 1) & 0xffff) << 8;
- case 1: k1 ^= (key.charCodeAt(i) & 0xffff);
- }
-
- this.h1 = h1;
- }
-
- this.k1 = k1;
+ /**
+ * Inserts a null-terminated String.
+ *
+ * @param value { String } The String value to write.
+ * @param arg2 { Number | String } The offset to write the string to, or the BufferEncoding to use.
+ * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
+ *
+ * @return this
+ */
+ insertStringNT(value, offset, encoding) {
+ utils_1.checkOffsetValue(offset);
+ // Write Values
+ this.insertString(value, offset, encoding);
+ this.insertUInt8(0x00, offset + value.length);
return this;
- };
-
- // Get the result of this hash
- //
- // @return {number} The 32-bit hash
- MurmurHash3.prototype.result = function() {
- var k1, h1;
-
- k1 = this.k1;
- h1 = this.h1;
-
- if (k1 > 0) {
- k1 = (k1 * 0x2d51 + (k1 & 0xffff) * 0xcc9e0000) & 0xffffffff;
- k1 = (k1 << 15) | (k1 >>> 17);
- k1 = (k1 * 0x3593 + (k1 & 0xffff) * 0x1b870000) & 0xffffffff;
- h1 ^= k1;
- }
-
- h1 ^= this.len;
-
- h1 ^= h1 >>> 16;
- h1 = (h1 * 0xca6b + (h1 & 0xffff) * 0x85eb0000) & 0xffffffff;
- h1 ^= h1 >>> 13;
- h1 = (h1 * 0xae35 + (h1 & 0xffff) * 0xc2b20000) & 0xffffffff;
- h1 ^= h1 >>> 16;
-
- return h1 >>> 0;
- };
-
- // Reset the hash object for reuse
- //
- // @param {number} seed An optional positive integer
- MurmurHash3.prototype.reset = function(seed) {
- this.h1 = typeof seed === 'number' ? seed : 0;
- this.rem = this.k1 = this.len = 0;
+ }
+ /**
+ * Writes a null-terminated String.
+ *
+ * @param value { String } The String value to write.
+ * @param arg2 { Number | String } The offset to write the string to, or the BufferEncoding to use.
+ * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
+ *
+ * @return this
+ */
+ writeStringNT(value, arg2, encoding) {
+ // Write Values
+ this.writeString(value, arg2, encoding);
+ this.writeUInt8(0x00, typeof arg2 === 'number' ? arg2 + value.length : this.writeOffset);
return this;
- };
-
- // A cached object to use. This can be safely used if you're in a single-
- // threaded environment, otherwise you need to create new hashes to use.
- cache = new MurmurHash3();
-
- if (true) {
- module.exports = MurmurHash3;
- } else {}
-}());
-
-
-/***/ }),
-/* 189 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var Buffer = __webpack_require__(215).Buffer;
-
-// Multibyte codec. In this scheme, a character is represented by 1 or more bytes.
-// Our codec supports UTF-16 surrogates, extensions for GB18030 and unicode sequences.
-// To save memory and loading time, we read table files only when requested.
-
-exports._dbcs = DBCSCodec;
-
-var UNASSIGNED = -1,
- GB18030_CODE = -2,
- SEQ_START = -10,
- NODE_START = -1000,
- UNASSIGNED_NODE = new Array(0x100),
- DEF_CHAR = -1;
-
-for (var i = 0; i < 0x100; i++)
- UNASSIGNED_NODE[i] = UNASSIGNED;
-
-
-// Class DBCSCodec reads and initializes mapping tables.
-function DBCSCodec(codecOptions, iconv) {
- this.encodingName = codecOptions.encodingName;
- if (!codecOptions)
- throw new Error("DBCS codec is called without the data.")
- if (!codecOptions.table)
- throw new Error("Encoding '" + this.encodingName + "' has no data.");
-
- // Load tables.
- var mappingTable = codecOptions.table();
-
-
- // Decode tables: MBCS -> Unicode.
-
- // decodeTables is a trie, encoded as an array of arrays of integers. Internal arrays are trie nodes and all have len = 256.
- // Trie root is decodeTables[0].
- // Values: >= 0 -> unicode character code. can be > 0xFFFF
- // == UNASSIGNED -> unknown/unassigned sequence.
- // == GB18030_CODE -> this is the end of a GB18030 4-byte sequence.
- // <= NODE_START -> index of the next node in our trie to process next byte.
- // <= SEQ_START -> index of the start of a character code sequence, in decodeTableSeq.
- this.decodeTables = [];
- this.decodeTables[0] = UNASSIGNED_NODE.slice(0); // Create root node.
-
- // Sometimes a MBCS char corresponds to a sequence of unicode chars. We store them as arrays of integers here.
- this.decodeTableSeq = [];
-
- // Actual mapping tables consist of chunks. Use them to fill up decode tables.
- for (var i = 0; i < mappingTable.length; i++)
- this._addDecodeChunk(mappingTable[i]);
-
- // Load & create GB18030 tables when needed.
- if (typeof codecOptions.gb18030 === 'function') {
- this.gb18030 = codecOptions.gb18030(); // Load GB18030 ranges.
-
- // Add GB18030 common decode nodes.
- var commonThirdByteNodeIdx = this.decodeTables.length;
- this.decodeTables.push(UNASSIGNED_NODE.slice(0));
-
- var commonFourthByteNodeIdx = this.decodeTables.length;
- this.decodeTables.push(UNASSIGNED_NODE.slice(0));
-
- // Fill out the tree
- var firstByteNode = this.decodeTables[0];
- for (var i = 0x81; i <= 0xFE; i++) {
- var secondByteNode = this.decodeTables[NODE_START - firstByteNode[i]];
- for (var j = 0x30; j <= 0x39; j++) {
- if (secondByteNode[j] === UNASSIGNED) {
- secondByteNode[j] = NODE_START - commonThirdByteNodeIdx;
- } else if (secondByteNode[j] > NODE_START) {
- throw new Error("gb18030 decode tables conflict at byte 2");
- }
-
- var thirdByteNode = this.decodeTables[NODE_START - secondByteNode[j]];
- for (var k = 0x81; k <= 0xFE; k++) {
- if (thirdByteNode[k] === UNASSIGNED) {
- thirdByteNode[k] = NODE_START - commonFourthByteNodeIdx;
- } else if (thirdByteNode[k] === NODE_START - commonFourthByteNodeIdx) {
- continue;
- } else if (thirdByteNode[k] > NODE_START) {
- throw new Error("gb18030 decode tables conflict at byte 3");
- }
-
- var fourthByteNode = this.decodeTables[NODE_START - thirdByteNode[k]];
- for (var l = 0x30; l <= 0x39; l++) {
- if (fourthByteNode[l] === UNASSIGNED)
- fourthByteNode[l] = GB18030_CODE;
- }
- }
- }
- }
}
-
- this.defaultCharUnicode = iconv.defaultCharUnicode;
-
-
- // Encode tables: Unicode -> DBCS.
-
- // `encodeTable` is array mapping from unicode char to encoded char. All its values are integers for performance.
- // Because it can be sparse, it is represented as array of buckets by 256 chars each. Bucket can be null.
- // Values: >= 0 -> it is a normal char. Write the value (if <=256 then 1 byte, if <=65536 then 2 bytes, etc.).
- // == UNASSIGNED -> no conversion found. Output a default char.
- // <= SEQ_START -> it's an index in encodeTableSeq, see below. The character starts a sequence.
- this.encodeTable = [];
-
- // `encodeTableSeq` is used when a sequence of unicode characters is encoded as a single code. We use a tree of
- // objects where keys correspond to characters in sequence and leafs are the encoded dbcs values. A special DEF_CHAR key
- // means end of sequence (needed when one sequence is a strict subsequence of another).
- // Objects are kept separately from encodeTable to increase performance.
- this.encodeTableSeq = [];
-
- // Some chars can be decoded, but need not be encoded.
- var skipEncodeChars = {};
- if (codecOptions.encodeSkipVals)
- for (var i = 0; i < codecOptions.encodeSkipVals.length; i++) {
- var val = codecOptions.encodeSkipVals[i];
- if (typeof val === 'number')
- skipEncodeChars[val] = true;
- else
- for (var j = val.from; j <= val.to; j++)
- skipEncodeChars[j] = true;
+ // Buffers
+ /**
+ * Reads a Buffer from the internal read position.
+ *
+ * @param length { Number } The length of data to read as a Buffer.
+ *
+ * @return { Buffer }
+ */
+ readBuffer(length) {
+ if (typeof length !== 'undefined') {
+ utils_1.checkLengthValue(length);
}
-
- // Use decode trie to recursively fill out encode tables.
- this._fillEncodeTable(0, 0, skipEncodeChars);
-
- // Add more encoding pairs when needed.
- if (codecOptions.encodeAdd) {
- for (var uChar in codecOptions.encodeAdd)
- if (Object.prototype.hasOwnProperty.call(codecOptions.encodeAdd, uChar))
- this._setEncodeChar(uChar.charCodeAt(0), codecOptions.encodeAdd[uChar]);
+ const lengthVal = typeof length === 'number' ? length : this.length;
+ const endPoint = Math.min(this.length, this._readOffset + lengthVal);
+ // Read buffer value
+ const value = this._buff.slice(this._readOffset, endPoint);
+ // Increment internal Buffer read offset
+ this._readOffset = endPoint;
+ return value;
}
-
- this.defCharSB = this.encodeTable[0][iconv.defaultCharSingleByte.charCodeAt(0)];
- if (this.defCharSB === UNASSIGNED) this.defCharSB = this.encodeTable[0]['?'];
- if (this.defCharSB === UNASSIGNED) this.defCharSB = "?".charCodeAt(0);
-}
-
-DBCSCodec.prototype.encoder = DBCSEncoder;
-DBCSCodec.prototype.decoder = DBCSDecoder;
-
-// Decoder helpers
-DBCSCodec.prototype._getDecodeTrieNode = function(addr) {
- var bytes = [];
- for (; addr > 0; addr >>>= 8)
- bytes.push(addr & 0xFF);
- if (bytes.length == 0)
- bytes.push(0);
-
- var node = this.decodeTables[0];
- for (var i = bytes.length-1; i > 0; i--) { // Traverse nodes deeper into the trie.
- var val = node[bytes[i]];
-
- if (val == UNASSIGNED) { // Create new node.
- node[bytes[i]] = NODE_START - this.decodeTables.length;
- this.decodeTables.push(node = UNASSIGNED_NODE.slice(0));
- }
- else if (val <= NODE_START) { // Existing node.
- node = this.decodeTables[NODE_START - val];
- }
- else
- throw new Error("Overwrite byte in " + this.encodingName + ", addr: " + addr.toString(16));
+ /**
+ * Writes a Buffer to the current write position.
+ *
+ * @param value { Buffer } The Buffer to write.
+ * @param offset { Number } The offset to write the Buffer to.
+ *
+ * @return this
+ */
+ insertBuffer(value, offset) {
+ utils_1.checkOffsetValue(offset);
+ return this._handleBuffer(value, true, offset);
}
- return node;
-}
-
-
-DBCSCodec.prototype._addDecodeChunk = function(chunk) {
- // First element of chunk is the hex mbcs code where we start.
- var curAddr = parseInt(chunk[0], 16);
-
- // Choose the decoding node where we'll write our chars.
- var writeTable = this._getDecodeTrieNode(curAddr);
- curAddr = curAddr & 0xFF;
-
- // Write all other elements of the chunk to the table.
- for (var k = 1; k < chunk.length; k++) {
- var part = chunk[k];
- if (typeof part === "string") { // String, write as-is.
- for (var l = 0; l < part.length;) {
- var code = part.charCodeAt(l++);
- if (0xD800 <= code && code < 0xDC00) { // Decode surrogate
- var codeTrail = part.charCodeAt(l++);
- if (0xDC00 <= codeTrail && codeTrail < 0xE000)
- writeTable[curAddr++] = 0x10000 + (code - 0xD800) * 0x400 + (codeTrail - 0xDC00);
- else
- throw new Error("Incorrect surrogate pair in " + this.encodingName + " at chunk " + chunk[0]);
- }
- else if (0x0FF0 < code && code <= 0x0FFF) { // Character sequence (our own encoding used)
- var len = 0xFFF - code + 2;
- var seq = [];
- for (var m = 0; m < len; m++)
- seq.push(part.charCodeAt(l++)); // Simple variation: don't support surrogates or subsequences in seq.
-
- writeTable[curAddr++] = SEQ_START - this.decodeTableSeq.length;
- this.decodeTableSeq.push(seq);
- }
- else
- writeTable[curAddr++] = code; // Basic char
+ /**
+ * Writes a Buffer to the current write position.
+ *
+ * @param value { Buffer } The Buffer to write.
+ * @param offset { Number } The offset to write the Buffer to.
+ *
+ * @return this
+ */
+ writeBuffer(value, offset) {
+ return this._handleBuffer(value, false, offset);
+ }
+ /**
+ * Reads a null-terminated Buffer from the current read poisiton.
+ *
+ * @return { Buffer }
+ */
+ readBufferNT() {
+ // Set null character position to the end SmartBuffer instance.
+ let nullPos = this.length;
+ // Find next null character (if one is not found, default from above is used)
+ for (let i = this._readOffset; i < this.length; i++) {
+ if (this._buff[i] === 0x00) {
+ nullPos = i;
+ break;
}
- }
- else if (typeof part === "number") { // Integer, meaning increasing sequence starting with prev character.
- var charCode = writeTable[curAddr - 1] + 1;
- for (var l = 0; l < part; l++)
- writeTable[curAddr++] = charCode++;
}
- else
- throw new Error("Incorrect type '" + typeof part + "' given in " + this.encodingName + " at chunk " + chunk[0]);
+ // Read value
+ const value = this._buff.slice(this._readOffset, nullPos);
+ // Increment internal Buffer read offset
+ this._readOffset = nullPos + 1;
+ return value;
}
- if (curAddr > 0xFF)
- throw new Error("Incorrect chunk in " + this.encodingName + " at addr " + chunk[0] + ": too long" + curAddr);
-}
-
-// Encoder helpers
-DBCSCodec.prototype._getEncodeBucket = function(uCode) {
- var high = uCode >> 8; // This could be > 0xFF because of astral characters.
- if (this.encodeTable[high] === undefined)
- this.encodeTable[high] = UNASSIGNED_NODE.slice(0); // Create bucket on demand.
- return this.encodeTable[high];
-}
-
-DBCSCodec.prototype._setEncodeChar = function(uCode, dbcsCode) {
- var bucket = this._getEncodeBucket(uCode);
- var low = uCode & 0xFF;
- if (bucket[low] <= SEQ_START)
- this.encodeTableSeq[SEQ_START-bucket[low]][DEF_CHAR] = dbcsCode; // There's already a sequence, set a single-char subsequence of it.
- else if (bucket[low] == UNASSIGNED)
- bucket[low] = dbcsCode;
-}
-
-DBCSCodec.prototype._setEncodeSequence = function(seq, dbcsCode) {
-
- // Get the root of character tree according to first character of the sequence.
- var uCode = seq[0];
- var bucket = this._getEncodeBucket(uCode);
- var low = uCode & 0xFF;
-
- var node;
- if (bucket[low] <= SEQ_START) {
- // There's already a sequence with - use it.
- node = this.encodeTableSeq[SEQ_START-bucket[low]];
- }
- else {
- // There was no sequence object - allocate a new one.
- node = {};
- if (bucket[low] !== UNASSIGNED) node[DEF_CHAR] = bucket[low]; // If a char was set before - make it a single-char subsequence.
- bucket[low] = SEQ_START - this.encodeTableSeq.length;
- this.encodeTableSeq.push(node);
+ /**
+ * Inserts a null-terminated Buffer.
+ *
+ * @param value { Buffer } The Buffer to write.
+ * @param offset { Number } The offset to write the Buffer to.
+ *
+ * @return this
+ */
+ insertBufferNT(value, offset) {
+ utils_1.checkOffsetValue(offset);
+ // Write Values
+ this.insertBuffer(value, offset);
+ this.insertUInt8(0x00, offset + value.length);
+ return this;
}
-
- // Traverse the character tree, allocating new nodes as needed.
- for (var j = 1; j < seq.length-1; j++) {
- var oldVal = node[uCode];
- if (typeof oldVal === 'object')
- node = oldVal;
- else {
- node = node[uCode] = {}
- if (oldVal !== undefined)
- node[DEF_CHAR] = oldVal
+ /**
+ * Writes a null-terminated Buffer.
+ *
+ * @param value { Buffer } The Buffer to write.
+ * @param offset { Number } The offset to write the Buffer to.
+ *
+ * @return this
+ */
+ writeBufferNT(value, offset) {
+ // Checks for valid numberic value;
+ if (typeof offset !== 'undefined') {
+ utils_1.checkOffsetValue(offset);
}
+ // Write Values
+ this.writeBuffer(value, offset);
+ this.writeUInt8(0x00, typeof offset === 'number' ? offset + value.length : this._writeOffset);
+ return this;
}
-
- // Set the leaf to given dbcsCode.
- uCode = seq[seq.length-1];
- node[uCode] = dbcsCode;
-}
-
-DBCSCodec.prototype._fillEncodeTable = function(nodeIdx, prefix, skipEncodeChars) {
- var node = this.decodeTables[nodeIdx];
- var hasValues = false;
- var subNodeEmpty = {};
- for (var i = 0; i < 0x100; i++) {
- var uCode = node[i];
- var mbCode = prefix + i;
- if (skipEncodeChars[mbCode])
- continue;
-
- if (uCode >= 0) {
- this._setEncodeChar(uCode, mbCode);
- hasValues = true;
- } else if (uCode <= NODE_START) {
- var subNodeIdx = NODE_START - uCode;
- if (!subNodeEmpty[subNodeIdx]) { // Skip empty subtrees (they are too large in gb18030).
- var newPrefix = (mbCode << 8) >>> 0; // NOTE: '>>> 0' keeps 32-bit num positive.
- if (this._fillEncodeTable(subNodeIdx, newPrefix, skipEncodeChars))
- hasValues = true;
- else
- subNodeEmpty[subNodeIdx] = true;
- }
- } else if (uCode <= SEQ_START) {
- this._setEncodeSequence(this.decodeTableSeq[SEQ_START - uCode], mbCode);
- hasValues = true;
- }
+ /**
+ * Clears the SmartBuffer instance to its original empty state.
+ */
+ clear() {
+ this._writeOffset = 0;
+ this._readOffset = 0;
+ this.length = 0;
+ return this;
}
- return hasValues;
-}
-
-
-
-// == Encoder ==================================================================
-
-function DBCSEncoder(options, codec) {
- // Encoder state
- this.leadSurrogate = -1;
- this.seqObj = undefined;
-
- // Static data
- this.encodeTable = codec.encodeTable;
- this.encodeTableSeq = codec.encodeTableSeq;
- this.defaultCharSingleByte = codec.defCharSB;
- this.gb18030 = codec.gb18030;
-}
-
-DBCSEncoder.prototype.write = function(str) {
- var newBuf = Buffer.alloc(str.length * (this.gb18030 ? 4 : 3)),
- leadSurrogate = this.leadSurrogate,
- seqObj = this.seqObj, nextChar = -1,
- i = 0, j = 0;
-
- while (true) {
- // 0. Get next character.
- if (nextChar === -1) {
- if (i == str.length) break;
- var uCode = str.charCodeAt(i++);
+ /**
+ * Gets the remaining data left to be read from the SmartBuffer instance.
+ *
+ * @return { Number }
+ */
+ remaining() {
+ return this.length - this._readOffset;
+ }
+ /**
+ * Gets the current read offset value of the SmartBuffer instance.
+ *
+ * @return { Number }
+ */
+ get readOffset() {
+ return this._readOffset;
+ }
+ /**
+ * Sets the read offset value of the SmartBuffer instance.
+ *
+ * @param offset { Number } - The offset value to set.
+ */
+ set readOffset(offset) {
+ utils_1.checkOffsetValue(offset);
+ // Check for bounds.
+ utils_1.checkTargetOffset(offset, this);
+ this._readOffset = offset;
+ }
+ /**
+ * Gets the current write offset value of the SmartBuffer instance.
+ *
+ * @return { Number }
+ */
+ get writeOffset() {
+ return this._writeOffset;
+ }
+ /**
+ * Sets the write offset value of the SmartBuffer instance.
+ *
+ * @param offset { Number } - The offset value to set.
+ */
+ set writeOffset(offset) {
+ utils_1.checkOffsetValue(offset);
+ // Check for bounds.
+ utils_1.checkTargetOffset(offset, this);
+ this._writeOffset = offset;
+ }
+ /**
+ * Gets the currently set string encoding of the SmartBuffer instance.
+ *
+ * @return { BufferEncoding } The string Buffer encoding currently set.
+ */
+ get encoding() {
+ return this._encoding;
+ }
+ /**
+ * Sets the string encoding of the SmartBuffer instance.
+ *
+ * @param encoding { BufferEncoding } The string Buffer encoding to set.
+ */
+ set encoding(encoding) {
+ utils_1.checkEncoding(encoding);
+ this._encoding = encoding;
+ }
+ /**
+ * Gets the underlying internal Buffer. (This includes unmanaged data in the Buffer)
+ *
+ * @return { Buffer } The Buffer value.
+ */
+ get internalBuffer() {
+ return this._buff;
+ }
+ /**
+ * Gets the value of the internal managed Buffer (Includes managed data only)
+ *
+ * @param { Buffer }
+ */
+ toBuffer() {
+ return this._buff.slice(0, this.length);
+ }
+ /**
+ * Gets the String value of the internal managed Buffer
+ *
+ * @param encoding { String } The BufferEncoding to display the Buffer as (defaults to instance level encoding).
+ */
+ toString(encoding) {
+ const encodingVal = typeof encoding === 'string' ? encoding : this._encoding;
+ // Check for invalid encoding.
+ utils_1.checkEncoding(encodingVal);
+ return this._buff.toString(encodingVal, 0, this.length);
+ }
+ /**
+ * Destroys the SmartBuffer instance.
+ */
+ destroy() {
+ this.clear();
+ return this;
+ }
+ /**
+ * Handles inserting and writing strings.
+ *
+ * @param value { String } The String value to insert.
+ * @param isInsert { Boolean } True if inserting a string, false if writing.
+ * @param arg2 { Number | String } The offset to insert the string at, or the BufferEncoding to use.
+ * @param encoding { String } The BufferEncoding to use for writing strings (defaults to instance encoding).
+ */
+ _handleString(value, isInsert, arg3, encoding) {
+ let offsetVal = this._writeOffset;
+ let encodingVal = this._encoding;
+ // Check for offset
+ if (typeof arg3 === 'number') {
+ offsetVal = arg3;
+ // Check for encoding
}
- else {
- var uCode = nextChar;
- nextChar = -1;
+ else if (typeof arg3 === 'string') {
+ utils_1.checkEncoding(arg3);
+ encodingVal = arg3;
}
-
- // 1. Handle surrogates.
- if (0xD800 <= uCode && uCode < 0xE000) { // Char is one of surrogates.
- if (uCode < 0xDC00) { // We've got lead surrogate.
- if (leadSurrogate === -1) {
- leadSurrogate = uCode;
- continue;
- } else {
- leadSurrogate = uCode;
- // Double lead surrogate found.
- uCode = UNASSIGNED;
- }
- } else { // We've got trail surrogate.
- if (leadSurrogate !== -1) {
- uCode = 0x10000 + (leadSurrogate - 0xD800) * 0x400 + (uCode - 0xDC00);
- leadSurrogate = -1;
- } else {
- // Incomplete surrogate pair - only trail surrogate found.
- uCode = UNASSIGNED;
- }
-
- }
+ // Check for encoding (third param)
+ if (typeof encoding === 'string') {
+ utils_1.checkEncoding(encoding);
+ encodingVal = encoding;
}
- else if (leadSurrogate !== -1) {
- // Incomplete surrogate pair - only lead surrogate found.
- nextChar = uCode; uCode = UNASSIGNED; // Write an error, then current char.
- leadSurrogate = -1;
+ // Calculate bytelength of string.
+ const byteLength = Buffer.byteLength(value, encodingVal);
+ // Ensure there is enough internal Buffer capacity.
+ if (isInsert) {
+ this.ensureInsertable(byteLength, offsetVal);
}
-
- // 2. Convert uCode character.
- var dbcsCode = UNASSIGNED;
- if (seqObj !== undefined && uCode != UNASSIGNED) { // We are in the middle of the sequence
- var resCode = seqObj[uCode];
- if (typeof resCode === 'object') { // Sequence continues.
- seqObj = resCode;
- continue;
-
- } else if (typeof resCode == 'number') { // Sequence finished. Write it.
- dbcsCode = resCode;
-
- } else if (resCode == undefined) { // Current character is not part of the sequence.
-
- // Try default character for this sequence
- resCode = seqObj[DEF_CHAR];
- if (resCode !== undefined) {
- dbcsCode = resCode; // Found. Write it.
- nextChar = uCode; // Current character will be written too in the next iteration.
-
- } else {
- // TODO: What if we have no default? (resCode == undefined)
- // Then, we should write first char of the sequence as-is and try the rest recursively.
- // Didn't do it for now because no encoding has this situation yet.
- // Currently, just skip the sequence and write current char.
- }
- }
- seqObj = undefined;
+ else {
+ this._ensureWriteable(byteLength, offsetVal);
}
- else if (uCode >= 0) { // Regular character
- var subtable = this.encodeTable[uCode >> 8];
- if (subtable !== undefined)
- dbcsCode = subtable[uCode & 0xFF];
-
- if (dbcsCode <= SEQ_START) { // Sequence start
- seqObj = this.encodeTableSeq[SEQ_START-dbcsCode];
- continue;
+ // Write value
+ this._buff.write(value, offsetVal, byteLength, encodingVal);
+ // Increment internal Buffer write offset;
+ if (isInsert) {
+ this._writeOffset += byteLength;
+ }
+ else {
+ // If an offset was given, check to see if we wrote beyond the current writeOffset.
+ if (typeof arg3 === 'number') {
+ this._writeOffset = Math.max(this._writeOffset, offsetVal + byteLength);
}
-
- if (dbcsCode == UNASSIGNED && this.gb18030) {
- // Use GB18030 algorithm to find character(s) to write.
- var idx = findIdx(this.gb18030.uChars, uCode);
- if (idx != -1) {
- var dbcsCode = this.gb18030.gbChars[idx] + (uCode - this.gb18030.uChars[idx]);
- newBuf[j++] = 0x81 + Math.floor(dbcsCode / 12600); dbcsCode = dbcsCode % 12600;
- newBuf[j++] = 0x30 + Math.floor(dbcsCode / 1260); dbcsCode = dbcsCode % 1260;
- newBuf[j++] = 0x81 + Math.floor(dbcsCode / 10); dbcsCode = dbcsCode % 10;
- newBuf[j++] = 0x30 + dbcsCode;
- continue;
- }
+ else {
+ // If no offset was given, we wrote to the end of the SmartBuffer so increment writeOffset.
+ this._writeOffset += byteLength;
}
}
-
- // 3. Write dbcsCode character.
- if (dbcsCode === UNASSIGNED)
- dbcsCode = this.defaultCharSingleByte;
-
- if (dbcsCode < 0x100) {
- newBuf[j++] = dbcsCode;
+ return this;
+ }
+ /**
+ * Handles writing or insert of a Buffer.
+ *
+ * @param value { Buffer } The Buffer to write.
+ * @param offset { Number } The offset to write the Buffer to.
+ */
+ _handleBuffer(value, isInsert, offset) {
+ const offsetVal = typeof offset === 'number' ? offset : this._writeOffset;
+ // Ensure there is enough internal Buffer capacity.
+ if (isInsert) {
+ this.ensureInsertable(value.length, offsetVal);
}
- else if (dbcsCode < 0x10000) {
- newBuf[j++] = dbcsCode >> 8; // high byte
- newBuf[j++] = dbcsCode & 0xFF; // low byte
+ else {
+ this._ensureWriteable(value.length, offsetVal);
}
- else if (dbcsCode < 0x1000000) {
- newBuf[j++] = dbcsCode >> 16;
- newBuf[j++] = (dbcsCode >> 8) & 0xFF;
- newBuf[j++] = dbcsCode & 0xFF;
- } else {
- newBuf[j++] = dbcsCode >>> 24;
- newBuf[j++] = (dbcsCode >>> 16) & 0xFF;
- newBuf[j++] = (dbcsCode >>> 8) & 0xFF;
- newBuf[j++] = dbcsCode & 0xFF;
+ // Write buffer value
+ value.copy(this._buff, offsetVal);
+ // Increment internal Buffer write offset;
+ if (isInsert) {
+ this._writeOffset += value.length;
}
- }
-
- this.seqObj = seqObj;
- this.leadSurrogate = leadSurrogate;
- return newBuf.slice(0, j);
-}
-
-DBCSEncoder.prototype.end = function() {
- if (this.leadSurrogate === -1 && this.seqObj === undefined)
- return; // All clean. Most often case.
-
- var newBuf = Buffer.alloc(10), j = 0;
-
- if (this.seqObj) { // We're in the sequence.
- var dbcsCode = this.seqObj[DEF_CHAR];
- if (dbcsCode !== undefined) { // Write beginning of the sequence.
- if (dbcsCode < 0x100) {
- newBuf[j++] = dbcsCode;
+ else {
+ // If an offset was given, check to see if we wrote beyond the current writeOffset.
+ if (typeof offset === 'number') {
+ this._writeOffset = Math.max(this._writeOffset, offsetVal + value.length);
}
else {
- newBuf[j++] = dbcsCode >> 8; // high byte
- newBuf[j++] = dbcsCode & 0xFF; // low byte
+ // If no offset was given, we wrote to the end of the SmartBuffer so increment writeOffset.
+ this._writeOffset += value.length;
}
- } else {
- // See todo above.
}
- this.seqObj = undefined;
- }
-
- if (this.leadSurrogate !== -1) {
- // Incomplete surrogate pair - only lead surrogate found.
- newBuf[j++] = this.defaultCharSingleByte;
- this.leadSurrogate = -1;
+ return this;
}
-
- return newBuf.slice(0, j);
-}
-
-// Export for testing
-DBCSEncoder.prototype.findIdx = findIdx;
-
-
-// == Decoder ==================================================================
-
-function DBCSDecoder(options, codec) {
- // Decoder state
- this.nodeIdx = 0;
- this.prevBytes = [];
-
- // Static data
- this.decodeTables = codec.decodeTables;
- this.decodeTableSeq = codec.decodeTableSeq;
- this.defaultCharUnicode = codec.defaultCharUnicode;
- this.gb18030 = codec.gb18030;
-}
-
-DBCSDecoder.prototype.write = function(buf) {
- var newBuf = Buffer.alloc(buf.length*2),
- nodeIdx = this.nodeIdx,
- prevBytes = this.prevBytes, prevOffset = this.prevBytes.length,
- seqStart = -this.prevBytes.length, // idx of the start of current parsed sequence.
- uCode;
-
- for (var i = 0, j = 0; i < buf.length; i++) {
- var curByte = (i >= 0) ? buf[i] : prevBytes[i + prevOffset];
-
- // Lookup in current trie node.
- var uCode = this.decodeTables[nodeIdx][curByte];
-
- if (uCode >= 0) {
- // Normal character, just use it.
+ /**
+ * Ensures that the internal Buffer is large enough to read data.
+ *
+ * @param length { Number } The length of the data that needs to be read.
+ * @param offset { Number } The offset of the data that needs to be read.
+ */
+ ensureReadable(length, offset) {
+ // Offset value defaults to managed read offset.
+ let offsetVal = this._readOffset;
+ // If an offset was provided, use it.
+ if (typeof offset !== 'undefined') {
+ // Checks for valid numberic value;
+ utils_1.checkOffsetValue(offset);
+ // Overide with custom offset.
+ offsetVal = offset;
}
- else if (uCode === UNASSIGNED) { // Unknown char.
- // TODO: Callback with seq.
- uCode = this.defaultCharUnicode.charCodeAt(0);
- i = seqStart; // Skip one byte ('i' will be incremented by the for loop) and try to parse again.
+ // Checks if offset is below zero, or the offset+length offset is beyond the total length of the managed data.
+ if (offsetVal < 0 || offsetVal + length > this.length) {
+ throw new Error(utils_1.ERRORS.INVALID_READ_BEYOND_BOUNDS);
}
- else if (uCode === GB18030_CODE) {
- if (i >= 3) {
- var ptr = (buf[i-3]-0x81)*12600 + (buf[i-2]-0x30)*1260 + (buf[i-1]-0x81)*10 + (curByte-0x30);
- } else {
- var ptr = (prevBytes[i-3+prevOffset]-0x81)*12600 +
- (((i-2 >= 0) ? buf[i-2] : prevBytes[i-2+prevOffset])-0x30)*1260 +
- (((i-1 >= 0) ? buf[i-1] : prevBytes[i-1+prevOffset])-0x81)*10 +
- (curByte-0x30);
- }
- var idx = findIdx(this.gb18030.gbChars, ptr);
- uCode = this.gb18030.uChars[idx] + ptr - this.gb18030.gbChars[idx];
+ }
+ /**
+ * Ensures that the internal Buffer is large enough to insert data.
+ *
+ * @param dataLength { Number } The length of the data that needs to be written.
+ * @param offset { Number } The offset of the data to be written.
+ */
+ ensureInsertable(dataLength, offset) {
+ // Checks for valid numberic value;
+ utils_1.checkOffsetValue(offset);
+ // Ensure there is enough internal Buffer capacity.
+ this._ensureCapacity(this.length + dataLength);
+ // If an offset was provided and its not the very end of the buffer, copy data into appropriate location in regards to the offset.
+ if (offset < this.length) {
+ this._buff.copy(this._buff, offset + dataLength, offset, this._buff.length);
}
- else if (uCode <= NODE_START) { // Go to next trie node.
- nodeIdx = NODE_START - uCode;
- continue;
+ // Adjust tracked smart buffer length
+ if (offset + dataLength > this.length) {
+ this.length = offset + dataLength;
}
- else if (uCode <= SEQ_START) { // Output a sequence of chars.
- var seq = this.decodeTableSeq[SEQ_START - uCode];
- for (var k = 0; k < seq.length - 1; k++) {
- uCode = seq[k];
- newBuf[j++] = uCode & 0xFF;
- newBuf[j++] = uCode >> 8;
+ else {
+ this.length += dataLength;
+ }
+ }
+ /**
+ * Ensures that the internal Buffer is large enough to write data.
+ *
+ * @param dataLength { Number } The length of the data that needs to be written.
+ * @param offset { Number } The offset of the data to be written (defaults to writeOffset).
+ */
+ _ensureWriteable(dataLength, offset) {
+ const offsetVal = typeof offset === 'number' ? offset : this._writeOffset;
+ // Ensure enough capacity to write data.
+ this._ensureCapacity(offsetVal + dataLength);
+ // Adjust SmartBuffer length (if offset + length is larger than managed length, adjust length)
+ if (offsetVal + dataLength > this.length) {
+ this.length = offsetVal + dataLength;
+ }
+ }
+ /**
+ * Ensures that the internal Buffer is large enough to write at least the given amount of data.
+ *
+ * @param minLength { Number } The minimum length of the data needs to be written.
+ */
+ _ensureCapacity(minLength) {
+ const oldLength = this._buff.length;
+ if (minLength > oldLength) {
+ let data = this._buff;
+ let newLength = (oldLength * 3) / 2 + 1;
+ if (newLength < minLength) {
+ newLength = minLength;
}
- uCode = seq[seq.length-1];
+ this._buff = Buffer.allocUnsafe(newLength);
+ data.copy(this._buff, 0, 0, oldLength);
}
- else
- throw new Error("iconv-lite internal error: invalid decoding table value " + uCode + " at " + nodeIdx + "/" + curByte);
-
- // Write the character to buffer, handling higher planes using surrogate pair.
- if (uCode >= 0x10000) {
- uCode -= 0x10000;
- var uCodeLead = 0xD800 | (uCode >> 10);
- newBuf[j++] = uCodeLead & 0xFF;
- newBuf[j++] = uCodeLead >> 8;
-
- uCode = 0xDC00 | (uCode & 0x3FF);
+ }
+ /**
+ * Reads a numeric number value using the provided function.
+ *
+ * @typeparam T { number | bigint } The type of the value to be read
+ *
+ * @param func { Function(offset: number) => number } The function to read data on the internal Buffer with.
+ * @param byteSize { Number } The number of bytes read.
+ * @param offset { Number } The offset to read from (optional). When this is not provided, the managed readOffset is used instead.
+ *
+ * @returns { T } the number value
+ */
+ _readNumberValue(func, byteSize, offset) {
+ this.ensureReadable(byteSize, offset);
+ // Call Buffer.readXXXX();
+ const value = func.call(this._buff, typeof offset === 'number' ? offset : this._readOffset);
+ // Adjust internal read offset if an optional read offset was not provided.
+ if (typeof offset === 'undefined') {
+ this._readOffset += byteSize;
}
- newBuf[j++] = uCode & 0xFF;
- newBuf[j++] = uCode >> 8;
-
- // Reset trie node.
- nodeIdx = 0; seqStart = i+1;
+ return value;
}
-
- this.nodeIdx = nodeIdx;
- this.prevBytes = (seqStart >= 0)
- ? Array.prototype.slice.call(buf, seqStart)
- : prevBytes.slice(seqStart + prevOffset).concat(Array.prototype.slice.call(buf));
-
- return newBuf.slice(0, j).toString('ucs2');
-}
-
-DBCSDecoder.prototype.end = function() {
- var ret = '';
-
- // Try to parse all remaining chars.
- while (this.prevBytes.length > 0) {
- // Skip 1 character in the buffer.
- ret += this.defaultCharUnicode;
- var bytesArr = this.prevBytes.slice(1);
-
- // Parse remaining as usual.
- this.prevBytes = [];
- this.nodeIdx = 0;
- if (bytesArr.length > 0)
- ret += this.write(bytesArr);
+ /**
+ * Inserts a numeric number value based on the given offset and value.
+ *
+ * @typeparam T { number | bigint } The type of the value to be written
+ *
+ * @param func { Function(offset: T, offset?) => number} The function to write data on the internal Buffer with.
+ * @param byteSize { Number } The number of bytes written.
+ * @param value { T } The number value to write.
+ * @param offset { Number } the offset to write the number at (REQUIRED).
+ *
+ * @returns SmartBuffer this buffer
+ */
+ _insertNumberValue(func, byteSize, value, offset) {
+ // Check for invalid offset values.
+ utils_1.checkOffsetValue(offset);
+ // Ensure there is enough internal Buffer capacity. (raw offset is passed)
+ this.ensureInsertable(byteSize, offset);
+ // Call buffer.writeXXXX();
+ func.call(this._buff, value, offset);
+ // Adjusts internally managed write offset.
+ this._writeOffset += byteSize;
+ return this;
}
-
- this.prevBytes = [];
- this.nodeIdx = 0;
- return ret;
-}
-
-// Binary search for GB18030. Returns largest i such that table[i] <= val.
-function findIdx(table, val) {
- if (table[0] > val)
- return -1;
-
- var l = 0, r = table.length;
- while (l < r-1) { // always table[l] <= val < table[r]
- var mid = l + ((r-l+1) >> 1);
- if (table[mid] <= val)
- l = mid;
- else
- r = mid;
+ /**
+ * Writes a numeric number value based on the given offset and value.
+ *
+ * @typeparam T { number | bigint } The type of the value to be written
+ *
+ * @param func { Function(offset: T, offset?) => number} The function to write data on the internal Buffer with.
+ * @param byteSize { Number } The number of bytes written.
+ * @param value { T } The number value to write.
+ * @param offset { Number } the offset to write the number at (REQUIRED).
+ *
+ * @returns SmartBuffer this buffer
+ */
+ _writeNumberValue(func, byteSize, value, offset) {
+ // If an offset was provided, validate it.
+ if (typeof offset === 'number') {
+ // Check if we're writing beyond the bounds of the managed data.
+ if (offset < 0) {
+ throw new Error(utils_1.ERRORS.INVALID_WRITE_BEYOND_BOUNDS);
+ }
+ utils_1.checkOffsetValue(offset);
+ }
+ // Default to writeOffset if no offset value was given.
+ const offsetVal = typeof offset === 'number' ? offset : this._writeOffset;
+ // Ensure there is enough internal Buffer capacity. (raw offset is passed)
+ this._ensureWriteable(byteSize, offsetVal);
+ func.call(this._buff, value, offsetVal);
+ // If an offset was given, check to see if we wrote beyond the current writeOffset.
+ if (typeof offset === 'number') {
+ this._writeOffset = Math.max(this._writeOffset, offsetVal + byteSize);
+ }
+ else {
+ // If no numeric offset was given, we wrote to the end of the SmartBuffer so increment writeOffset.
+ this._writeOffset += byteSize;
+ }
+ return this;
}
- return l;
}
-
-
+exports.SmartBuffer = SmartBuffer;
+//# sourceMappingURL=smartbuffer.js.map
/***/ }),
-/* 190 */
+/* 119 */,
+/* 120 */
/***/ (function(module, __unusedexports, __webpack_require__) {
-"use strict";
+const compareBuild = __webpack_require__(465)
+const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose))
+module.exports = sort
-var url = __webpack_require__(835)
-var gitHosts = __webpack_require__(813)
-var GitHost = module.exports = __webpack_require__(599)
-var protocolToRepresentationMap = {
- 'git+ssh:': 'sshurl',
- 'git+https:': 'https',
- 'ssh:': 'sshurl',
- 'git:': 'git'
-}
+/***/ }),
+/* 121 */,
+/* 122 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-function protocolToRepresentation (protocol) {
- return protocolToRepresentationMap[protocol] || protocol.slice(0, -1)
-}
+const SemVer = __webpack_require__(206)
+const major = (a, loose) => new SemVer(a, loose).major
+module.exports = major
-var authProtocols = {
- 'git:': true,
- 'https:': true,
- 'git+https:': true,
- 'http:': true,
- 'git+http:': true
-}
-var cache = {}
+/***/ }),
+/* 123 */,
+/* 124 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-module.exports.fromUrl = function (giturl, opts) {
- if (typeof giturl !== 'string') return
- var key = giturl + JSON.stringify(opts || {})
+// hoisted class for cyclic dependency
+class Range {
+ constructor (range, options) {
+ options = parseOptions(options)
- if (!(key in cache)) {
- cache[key] = fromUrl(giturl, opts)
- }
+ if (range instanceof Range) {
+ if (
+ range.loose === !!options.loose &&
+ range.includePrerelease === !!options.includePrerelease
+ ) {
+ return range
+ } else {
+ return new Range(range.raw, options)
+ }
+ }
- return cache[key]
-}
+ if (range instanceof Comparator) {
+ // just put it in the set and return
+ this.raw = range.value
+ this.set = [[range]]
+ this.format()
+ return this
+ }
-function fromUrl (giturl, opts) {
- if (giturl == null || giturl === '') return
- var url = fixupUnqualifiedGist(
- isGitHubShorthand(giturl) ? 'github:' + giturl : giturl
- )
- var parsed = parseGitUrl(url)
- var shortcutMatch = url.match(new RegExp('^([^:]+):(?:(?:[^@:]+(?:[^@]+)?@)?([^/]*))[/](.+?)(?:[.]git)?($|#)'))
- var matches = Object.keys(gitHosts).map(function (gitHostName) {
- try {
- var gitHostInfo = gitHosts[gitHostName]
- var auth = null
- if (parsed.auth && authProtocols[parsed.protocol]) {
- auth = parsed.auth
- }
- var committish = parsed.hash ? decodeURIComponent(parsed.hash.substr(1)) : null
- var user = null
- var project = null
- var defaultRepresentation = null
- if (shortcutMatch && shortcutMatch[1] === gitHostName) {
- user = shortcutMatch[2] && decodeURIComponent(shortcutMatch[2])
- project = decodeURIComponent(shortcutMatch[3])
- defaultRepresentation = 'shortcut'
- } else {
- if (parsed.host && parsed.host !== gitHostInfo.domain && parsed.host.replace(/^www[.]/, '') !== gitHostInfo.domain) return
- if (!gitHostInfo.protocols_re.test(parsed.protocol)) return
- if (!parsed.path) return
- var pathmatch = gitHostInfo.pathmatch
- var matched = parsed.path.match(pathmatch)
- if (!matched) return
- /* istanbul ignore else */
- if (matched[1] !== null && matched[1] !== undefined) {
- user = decodeURIComponent(matched[1].replace(/^:/, ''))
+ this.options = options
+ this.loose = !!options.loose
+ this.includePrerelease = !!options.includePrerelease
+
+ // First, split based on boolean or ||
+ this.raw = range
+ this.set = range
+ .split(/\s*\|\|\s*/)
+ // map the range to a 2d array of comparators
+ .map(range => this.parseRange(range.trim()))
+ // throw out any comparator lists that are empty
+ // this generally means that it was not a valid range, which is allowed
+ // in loose mode, but will still throw if the WHOLE range is invalid.
+ .filter(c => c.length)
+
+ if (!this.set.length) {
+ throw new TypeError(`Invalid SemVer Range: ${range}`)
+ }
+
+ // if we have any that are not the null set, throw out null sets.
+ if (this.set.length > 1) {
+ // keep the first one, in case they're all null sets
+ const first = this.set[0]
+ this.set = this.set.filter(c => !isNullSet(c[0]))
+ if (this.set.length === 0)
+ this.set = [first]
+ else if (this.set.length > 1) {
+ // if we have any that are *, then the range is just *
+ for (const c of this.set) {
+ if (c.length === 1 && isAny(c[0])) {
+ this.set = [c]
+ break
+ }
}
- project = decodeURIComponent(matched[2])
- defaultRepresentation = protocolToRepresentation(parsed.protocol)
}
- return new GitHost(gitHostName, user, auth, project, committish, defaultRepresentation, opts)
- } catch (ex) {
- /* istanbul ignore else */
- if (ex instanceof URIError) {
- } else throw ex
}
- }).filter(function (gitHostInfo) { return gitHostInfo })
- if (matches.length !== 1) return
- return matches[0]
-}
-function isGitHubShorthand (arg) {
- // Note: This does not fully test the git ref format.
- // See https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html
- //
- // The only way to do this properly would be to shell out to
- // git-check-ref-format, and as this is a fast sync function,
- // we don't want to do that. Just let git fail if it turns
- // out that the commit-ish is invalid.
- // GH usernames cannot start with . or -
- return /^[^:@%/\s.-][^:@%/\s]*[/][^:@\s/%]+(?:#.*)?$/.test(arg)
-}
+ this.format()
+ }
-function fixupUnqualifiedGist (giturl) {
- // necessary for round-tripping gists
- var parsed = url.parse(giturl)
- if (parsed.protocol === 'gist:' && parsed.host && !parsed.path) {
- return parsed.protocol + '/' + parsed.host
- } else {
- return giturl
+ format () {
+ this.range = this.set
+ .map((comps) => {
+ return comps.join(' ').trim()
+ })
+ .join('||')
+ .trim()
+ return this.range
+ }
+
+ toString () {
+ return this.range
+ }
+
+ parseRange (range) {
+ range = range.trim()
+
+ // memoize range parsing for performance.
+ // this is a very hot path, and fully deterministic.
+ const memoOpts = Object.keys(this.options).join(',')
+ const memoKey = `parseRange:${memoOpts}:${range}`
+ const cached = cache.get(memoKey)
+ if (cached)
+ return cached
+
+ const loose = this.options.loose
+ // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
+ const hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
+ range = range.replace(hr, hyphenReplace(this.options.includePrerelease))
+ debug('hyphen replace', range)
+ // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
+ range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
+ debug('comparator trim', range, re[t.COMPARATORTRIM])
+
+ // `~ 1.2.3` => `~1.2.3`
+ range = range.replace(re[t.TILDETRIM], tildeTrimReplace)
+
+ // `^ 1.2.3` => `^1.2.3`
+ range = range.replace(re[t.CARETTRIM], caretTrimReplace)
+
+ // normalize spaces
+ range = range.split(/\s+/).join(' ')
+
+ // At this point, the range is completely trimmed and
+ // ready to be split into comparators.
+
+ const compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
+ const rangeList = range
+ .split(' ')
+ .map(comp => parseComparator(comp, this.options))
+ .join(' ')
+ .split(/\s+/)
+ // >=0.0.0 is equivalent to *
+ .map(comp => replaceGTE0(comp, this.options))
+ // in loose mode, throw out any that are not valid comparators
+ .filter(this.options.loose ? comp => !!comp.match(compRe) : () => true)
+ .map(comp => new Comparator(comp, this.options))
+
+ // if any comparators are the null set, then replace with JUST null set
+ // if more than one comparator, remove any * comparators
+ // also, don't include the same comparator more than once
+ const l = rangeList.length
+ const rangeMap = new Map()
+ for (const comp of rangeList) {
+ if (isNullSet(comp))
+ return [comp]
+ rangeMap.set(comp.value, comp)
+ }
+ if (rangeMap.size > 1 && rangeMap.has(''))
+ rangeMap.delete('')
+
+ const result = [...rangeMap.values()]
+ cache.set(memoKey, result)
+ return result
}
-}
-function parseGitUrl (giturl) {
- var matched = giturl.match(/^([^@]+)@([^:/]+):[/]?((?:[^/]+[/])?[^/]+?)(?:[.]git)?(#.*)?$/)
- if (!matched) {
- var legacy = url.parse(giturl)
- // If we don't have url.URL, then sorry, this is just not fixable.
- // This affects Node <= 6.12.
- if (legacy.auth && typeof url.URL === 'function') {
- // git urls can be in the form of scp-style/ssh-connect strings, like
- // git+ssh://user@host.com:some/path, which the legacy url parser
- // supports, but WhatWG url.URL class does not. However, the legacy
- // parser de-urlencodes the username and password, so something like
- // https://user%3An%40me:p%40ss%3Aword@x.com/ becomes
- // https://user:n@me:p@ss:word@x.com/ which is all kinds of wrong.
- // Pull off just the auth and host, so we dont' get the confusing
- // scp-style URL, then pass that to the WhatWG parser to get the
- // auth properly escaped.
- var authmatch = giturl.match(/[^@]+@[^:/]+/)
- /* istanbul ignore else - this should be impossible */
- if (authmatch) {
- var whatwg = new url.URL(authmatch[0])
- legacy.auth = whatwg.username || ''
- if (whatwg.password) legacy.auth += ':' + whatwg.password
- }
+ intersects (range, options) {
+ if (!(range instanceof Range)) {
+ throw new TypeError('a Range is required')
}
- return legacy
- }
- return {
- protocol: 'git+ssh:',
- slashes: true,
- auth: matched[1],
- host: matched[2],
- port: null,
- hostname: matched[2],
- hash: matched[4],
- search: null,
- query: null,
- pathname: '/' + matched[3],
- path: '/' + matched[3],
- href: 'git+ssh://' + matched[1] + '@' + matched[2] +
- '/' + matched[3] + (matched[4] || '')
- }
-}
+ return this.set.some((thisComparators) => {
+ return (
+ isSatisfiable(thisComparators, options) &&
+ range.set.some((rangeComparators) => {
+ return (
+ isSatisfiable(rangeComparators, options) &&
+ thisComparators.every((thisComparator) => {
+ return rangeComparators.every((rangeComparator) => {
+ return thisComparator.intersects(rangeComparator, options)
+ })
+ })
+ )
+ })
+ )
+ })
+ }
-/***/ }),
-/* 191 */
-/***/ (function(module) {
+ // if ANY of the sets match ALL of its comparators, then pass
+ test (version) {
+ if (!version) {
+ return false
+ }
-module.exports = require("querystring");
+ if (typeof version === 'string') {
+ try {
+ version = new SemVer(version, this.options)
+ } catch (er) {
+ return false
+ }
+ }
-/***/ }),
-/* 192 */,
-/* 193 */,
-/* 194 */,
-/* 195 */,
-/* 196 */,
-/* 197 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+ for (let i = 0; i < this.set.length; i++) {
+ if (testSet(this.set[i], version, this.options)) {
+ return true
+ }
+ }
+ return false
+ }
+}
+module.exports = Range
-"use strict";
+const LRU = __webpack_require__(200)
+const cache = new LRU({ max: 1000 })
+const parseOptions = __webpack_require__(143)
+const Comparator = __webpack_require__(174)
+const debug = __webpack_require__(548)
+const SemVer = __webpack_require__(206)
+const {
+ re,
+ t,
+ comparatorTrimReplace,
+ tildeTrimReplace,
+ caretTrimReplace
+} = __webpack_require__(328)
-Object.defineProperty(exports, "__esModule", {
- value: true
-});
-exports.default = void 0;
+const isNullSet = c => c.value === '<0.0.0-0'
+const isAny = c => c.value === ''
-var _validate = _interopRequireDefault(__webpack_require__(676));
+// take a set of comparators and determine whether there
+// exists a version which can satisfy it
+const isSatisfiable = (comparators, options) => {
+ let result = true
+ const remainingComparators = comparators.slice()
+ let testComparator = remainingComparators.pop()
-function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+ while (result && remainingComparators.length) {
+ result = remainingComparators.every((otherComparator) => {
+ return testComparator.intersects(otherComparator, options)
+ })
-function parse(uuid) {
- if (!(0, _validate.default)(uuid)) {
- throw TypeError('Invalid UUID');
+ testComparator = remainingComparators.pop()
}
- let v;
- const arr = new Uint8Array(16); // Parse ########-....-....-....-............
+ return result
+}
- arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
- arr[1] = v >>> 16 & 0xff;
- arr[2] = v >>> 8 & 0xff;
- arr[3] = v & 0xff; // Parse ........-####-....-....-............
+// comprised of xranges, tildes, stars, and gtlt's at this point.
+// already replaced the hyphen ranges
+// turn into a set of JUST comparators.
+const parseComparator = (comp, options) => {
+ debug('comp', comp, options)
+ comp = replaceCarets(comp, options)
+ debug('caret', comp)
+ comp = replaceTildes(comp, options)
+ debug('tildes', comp)
+ comp = replaceXRanges(comp, options)
+ debug('xrange', comp)
+ comp = replaceStars(comp, options)
+ debug('stars', comp)
+ return comp
+}
- arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
- arr[5] = v & 0xff; // Parse ........-....-####-....-............
+const isX = id => !id || id.toLowerCase() === 'x' || id === '*'
- arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
- arr[7] = v & 0xff; // Parse ........-....-....-####-............
+// ~, ~> --> * (any, kinda silly)
+// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0
+// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0
+// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0
+// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0
+// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0
+const replaceTildes = (comp, options) =>
+ comp.trim().split(/\s+/).map((comp) => {
+ return replaceTilde(comp, options)
+ }).join(' ')
- arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
- arr[9] = v & 0xff; // Parse ........-....-....-....-############
- // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
+const replaceTilde = (comp, options) => {
+ const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
+ return comp.replace(r, (_, M, m, p, pr) => {
+ debug('tilde', comp, _, M, m, p, pr)
+ let ret
- arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
- arr[11] = v / 0x100000000 & 0xff;
- arr[12] = v >>> 24 & 0xff;
- arr[13] = v >>> 16 & 0xff;
- arr[14] = v >>> 8 & 0xff;
- arr[15] = v & 0xff;
- return arr;
+ if (isX(M)) {
+ ret = ''
+ } else if (isX(m)) {
+ ret = `>=${M}.0.0 <${+M + 1}.0.0-0`
+ } else if (isX(p)) {
+ // ~1.2 == >=1.2.0 <1.3.0-0
+ ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0`
+ } else if (pr) {
+ debug('replaceTilde pr', pr)
+ ret = `>=${M}.${m}.${p}-${pr
+ } <${M}.${+m + 1}.0-0`
+ } else {
+ // ~1.2.3 == >=1.2.3 <1.3.0-0
+ ret = `>=${M}.${m}.${p
+ } <${M}.${+m + 1}.0-0`
+ }
+
+ debug('tilde return', ret)
+ return ret
+ })
}
-var _default = parse;
-exports.default = _default;
+// ^ --> * (any, kinda silly)
+// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0
+// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0
+// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0
+// ^1.2.3 --> >=1.2.3 <2.0.0-0
+// ^1.2.0 --> >=1.2.0 <2.0.0-0
+const replaceCarets = (comp, options) =>
+ comp.trim().split(/\s+/).map((comp) => {
+ return replaceCaret(comp, options)
+ }).join(' ')
-/***/ }),
-/* 198 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+const replaceCaret = (comp, options) => {
+ debug('caret', comp, options)
+ const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
+ const z = options.includePrerelease ? '-0' : ''
+ return comp.replace(r, (_, M, m, p, pr) => {
+ debug('caret', comp, _, M, m, p, pr)
+ let ret
-"use strict";
+ if (isX(M)) {
+ ret = ''
+ } else if (isX(m)) {
+ ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0`
+ } else if (isX(p)) {
+ if (M === '0') {
+ ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0`
+ } else {
+ ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0`
+ }
+ } else if (pr) {
+ debug('replaceCaret pr', pr)
+ if (M === '0') {
+ if (m === '0') {
+ ret = `>=${M}.${m}.${p}-${pr
+ } <${M}.${m}.${+p + 1}-0`
+ } else {
+ ret = `>=${M}.${m}.${p}-${pr
+ } <${M}.${+m + 1}.0-0`
+ }
+ } else {
+ ret = `>=${M}.${m}.${p}-${pr
+ } <${+M + 1}.0.0-0`
+ }
+ } else {
+ debug('no pr')
+ if (M === '0') {
+ if (m === '0') {
+ ret = `>=${M}.${m}.${p
+ }${z} <${M}.${m}.${+p + 1}-0`
+ } else {
+ ret = `>=${M}.${m}.${p
+ }${z} <${M}.${+m + 1}.0-0`
+ }
+ } else {
+ ret = `>=${M}.${m}.${p
+ } <${+M + 1}.0.0-0`
+ }
+ }
-function __export(m) {
- for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];
+ debug('caret return', ret)
+ return ret
+ })
}
-Object.defineProperty(exports, "__esModule", { value: true });
-__export(__webpack_require__(359));
-//# sourceMappingURL=index.js.map
-
-/***/ }),
-/* 199 */,
-/* 200 */
-/***/ (function(module) {
-module.exports = function () {
- // see https://code.google.com/p/v8/wiki/JavaScriptStackTraceApi
- var origPrepareStackTrace = Error.prepareStackTrace;
- Error.prepareStackTrace = function (_, stack) { return stack; };
- var stack = (new Error()).stack;
- Error.prepareStackTrace = origPrepareStackTrace;
- return stack[2].getFileName();
-};
+const replaceXRanges = (comp, options) => {
+ debug('replaceXRanges', comp, options)
+ return comp.split(/\s+/).map((comp) => {
+ return replaceXRange(comp, options)
+ }).join(' ')
+}
+const replaceXRange = (comp, options) => {
+ comp = comp.trim()
+ const r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
+ return comp.replace(r, (ret, gtlt, M, m, p, pr) => {
+ debug('xRange', comp, ret, gtlt, M, m, p, pr)
+ const xM = isX(M)
+ const xm = xM || isX(m)
+ const xp = xm || isX(p)
+ const anyX = xp
-/***/ }),
-/* 201 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ if (gtlt === '=' && anyX) {
+ gtlt = ''
+ }
-"use strict";
+ // if we're including prereleases in the match, then we need
+ // to fix this to -0, the lowest possible prerelease value
+ pr = options.includePrerelease ? '-0' : ''
+ if (xM) {
+ if (gtlt === '>' || gtlt === '<') {
+ // nothing is allowed
+ ret = '<0.0.0-0'
+ } else {
+ // nothing is forbidden
+ ret = '*'
+ }
+ } else if (gtlt && anyX) {
+ // we know patch is an x, because we have any x at all.
+ // replace X with 0
+ if (xm) {
+ m = 0
+ }
+ p = 0
-const fs = __webpack_require__(598)
-const BB = __webpack_require__(489)
-const chmod = BB.promisify(fs.chmod)
-const unlink = BB.promisify(fs.unlink)
-let move
-let pinflight
-
-module.exports = moveFile
-function moveFile (src, dest) {
- // This isn't quite an fs.rename -- the assumption is that
- // if `dest` already exists, and we get certain errors while
- // trying to move it, we should just not bother.
- //
- // In the case of cache corruption, users will receive an
- // EINTEGRITY error elsewhere, and can remove the offending
- // content their own way.
- //
- // Note that, as the name suggests, this strictly only supports file moves.
- return BB.fromNode(cb => {
- fs.link(src, dest, err => {
- if (err) {
- if (err.code === 'EEXIST' || err.code === 'EBUSY') {
- // file already exists, so whatever
- } else if (err.code === 'EPERM' && process.platform === 'win32') {
- // file handle stayed open even past graceful-fs limits
+ if (gtlt === '>') {
+ // >1 => >=2.0.0
+ // >1.2 => >=1.3.0
+ gtlt = '>='
+ if (xm) {
+ M = +M + 1
+ m = 0
+ p = 0
} else {
- return cb(err)
+ m = +m + 1
+ p = 0
}
- }
- return cb()
- })
- }).then(() => {
- // content should never change for any reason, so make it read-only
- return BB.join(unlink(src), process.platform !== 'win32' && chmod(dest, '0444'))
- }).catch(() => {
- if (!pinflight) { pinflight = __webpack_require__(593) }
- return pinflight('cacache-move-file:' + dest, () => {
- return BB.promisify(fs.stat)(dest).catch(err => {
- if (err.code !== 'ENOENT') {
- // Something else is wrong here. Bail bail bail
- throw err
+ } else if (gtlt === '<=') {
+ // <=0.7.x is actually <0.8.0, since any 0.7.x should
+ // pass. Similarly, <=7.x is actually <8.0.0, etc.
+ gtlt = '<'
+ if (xm) {
+ M = +M + 1
+ } else {
+ m = +m + 1
}
- // file doesn't already exist! let's try a rename -> copy fallback
- if (!move) { move = __webpack_require__(184) }
- return move(src, dest, { BB, fs })
- })
- })
- })
-}
+ }
+ if (gtlt === '<')
+ pr = '-0'
-/***/ }),
-/* 202 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ ret = `${gtlt + M}.${m}.${p}${pr}`
+ } else if (xm) {
+ ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0`
+ } else if (xp) {
+ ret = `>=${M}.${m}.0${pr
+ } <${M}.${+m + 1}.0-0`
+ }
-"use strict";
+ debug('xRange return', ret)
-var process = __webpack_require__(356)
-try {
- module.exports = setImmediate
-} catch (ex) {
- module.exports = process.nextTick
+ return ret
+ })
}
+// Because * is AND-ed with everything else in the comparator,
+// and '' means "any version", just remove the *s entirely.
+const replaceStars = (comp, options) => {
+ debug('replaceStars', comp, options)
+ // Looseness is ignored here. star is always as loose as it gets!
+ return comp.trim().replace(re[t.STAR], '')
+}
-/***/ }),
-/* 203 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-
-// this[BUFFER] is the remainder of a chunk if we're waiting for
-// the full 512 bytes of a header to come in. We will Buffer.concat()
-// it to the next write(), which is a mem copy, but a small one.
-//
-// this[QUEUE] is a Yallist of entries that haven't been emitted
-// yet this can only get filled up if the user keeps write()ing after
-// a write() returns false, or does a write() with more than one entry
-//
-// We don't buffer chunks, we always parse them and either create an
-// entry, or push it into the active entry. The ReadEntry class knows
-// to throw data away if .ignore=true
-//
-// Shift entry off the buffer when it emits 'end', and emit 'entry' for
-// the next one in the list.
-//
-// At any time, we're pushing body chunks into the entry at WRITEENTRY,
-// and waiting for 'end' on the entry at READENTRY
-//
-// ignored entries get .resume() called on them straight away
-
-const warner = __webpack_require__(571)
-const path = __webpack_require__(622)
-const Header = __webpack_require__(725)
-const EE = __webpack_require__(614)
-const Yallist = __webpack_require__(612)
-const maxMetaEntrySize = 1024 * 1024
-const Entry = __webpack_require__(589)
-const Pax = __webpack_require__(480)
-const zlib = __webpack_require__(268)
-const Buffer = __webpack_require__(921)
+const replaceGTE0 = (comp, options) => {
+ debug('replaceGTE0', comp, options)
+ return comp.trim()
+ .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '')
+}
-const gzipHeader = Buffer.from([0x1f, 0x8b])
-const STATE = Symbol('state')
-const WRITEENTRY = Symbol('writeEntry')
-const READENTRY = Symbol('readEntry')
-const NEXTENTRY = Symbol('nextEntry')
-const PROCESSENTRY = Symbol('processEntry')
-const EX = Symbol('extendedHeader')
-const GEX = Symbol('globalExtendedHeader')
-const META = Symbol('meta')
-const EMITMETA = Symbol('emitMeta')
-const BUFFER = Symbol('buffer')
-const QUEUE = Symbol('queue')
-const ENDED = Symbol('ended')
-const EMITTEDEND = Symbol('emittedEnd')
-const EMIT = Symbol('emit')
-const UNZIP = Symbol('unzip')
-const CONSUMECHUNK = Symbol('consumeChunk')
-const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
-const CONSUMEBODY = Symbol('consumeBody')
-const CONSUMEMETA = Symbol('consumeMeta')
-const CONSUMEHEADER = Symbol('consumeHeader')
-const CONSUMING = Symbol('consuming')
-const BUFFERCONCAT = Symbol('bufferConcat')
-const MAYBEEND = Symbol('maybeEnd')
-const WRITING = Symbol('writing')
-const ABORTED = Symbol('aborted')
-const DONE = Symbol('onDone')
+// This function is passed to string.replace(re[t.HYPHENRANGE])
+// M, m, patch, prerelease, build
+// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
+// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do
+// 1.2 - 3.4 => >=1.2.0 <3.5.0-0
+const hyphenReplace = incPr => ($0,
+ from, fM, fm, fp, fpr, fb,
+ to, tM, tm, tp, tpr, tb) => {
+ if (isX(fM)) {
+ from = ''
+ } else if (isX(fm)) {
+ from = `>=${fM}.0.0${incPr ? '-0' : ''}`
+ } else if (isX(fp)) {
+ from = `>=${fM}.${fm}.0${incPr ? '-0' : ''}`
+ } else if (fpr) {
+ from = `>=${from}`
+ } else {
+ from = `>=${from}${incPr ? '-0' : ''}`
+ }
-const noop = _ => true
+ if (isX(tM)) {
+ to = ''
+ } else if (isX(tm)) {
+ to = `<${+tM + 1}.0.0-0`
+ } else if (isX(tp)) {
+ to = `<${tM}.${+tm + 1}.0-0`
+ } else if (tpr) {
+ to = `<=${tM}.${tm}.${tp}-${tpr}`
+ } else if (incPr) {
+ to = `<${tM}.${tm}.${+tp + 1}-0`
+ } else {
+ to = `<=${to}`
+ }
-module.exports = warner(class Parser extends EE {
- constructor (opt) {
- opt = opt || {}
- super(opt)
+ return (`${from} ${to}`).trim()
+}
- if (opt.ondone)
- this.on(DONE, opt.ondone)
- else
- this.on(DONE, _ => {
- this.emit('prefinish')
- this.emit('finish')
- this.emit('end')
- this.emit('close')
- })
+const testSet = (set, version, options) => {
+ for (let i = 0; i < set.length; i++) {
+ if (!set[i].test(version)) {
+ return false
+ }
+ }
- this.strict = !!opt.strict
- this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
- this.filter = typeof opt.filter === 'function' ? opt.filter : noop
+ if (version.prerelease.length && !options.includePrerelease) {
+ // Find the set of versions that are allowed to have prereleases
+ // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
+ // That should allow `1.2.3-pr.2` to pass.
+ // However, `1.2.4-alpha.notready` should NOT be allowed,
+ // even though it's within the range set by the comparators.
+ for (let i = 0; i < set.length; i++) {
+ debug(set[i].semver)
+ if (set[i].semver === Comparator.ANY) {
+ continue
+ }
- // have to set this so that streams are ok piping into it
- this.writable = true
- this.readable = false
+ if (set[i].semver.prerelease.length > 0) {
+ const allowed = set[i].semver
+ if (allowed.major === version.major &&
+ allowed.minor === version.minor &&
+ allowed.patch === version.patch) {
+ return true
+ }
+ }
+ }
- this[QUEUE] = new Yallist()
- this[BUFFER] = null
- this[READENTRY] = null
- this[WRITEENTRY] = null
- this[STATE] = 'begin'
- this[META] = ''
- this[EX] = null
- this[GEX] = null
- this[ENDED] = false
- this[UNZIP] = null
- this[ABORTED] = false
- if (typeof opt.onwarn === 'function')
- this.on('warn', opt.onwarn)
- if (typeof opt.onentry === 'function')
- this.on('entry', opt.onentry)
+ // Version has a -pre, but it's not one of the ones we like.
+ return false
}
- [CONSUMEHEADER] (chunk, position) {
- let header
- try {
- header = new Header(chunk, position, this[EX], this[GEX])
- } catch (er) {
- return this.warn('invalid entry', er)
- }
+ return true
+}
- if (header.nullBlock)
- this[EMIT]('nullBlock')
- else if (!header.cksumValid)
- this.warn('invalid entry', header)
- else if (!header.path)
- this.warn('invalid: path is required', header)
- else {
- const type = header.type
- if (/^(Symbolic)?Link$/.test(type) && !header.linkpath)
- this.warn('invalid: linkpath required', header)
- else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath)
- this.warn('invalid: linkpath forbidden', header)
- else {
- const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
- if (entry.meta) {
- if (entry.size > this.maxMetaEntrySize) {
- entry.ignore = true
- this[EMIT]('ignoredEntry', entry)
- this[STATE] = 'ignore'
- } else if (entry.size > 0) {
- this[META] = ''
- entry.on('data', c => this[META] += c)
- this[STATE] = 'meta'
- }
- } else {
+/***/ }),
+/* 125 */,
+/* 126 */,
+/* 127 */
+/***/ (function(module, exports, __webpack_require__) {
- this[EX] = null
- entry.ignore = entry.ignore || !this.filter(entry.path, entry)
- if (entry.ignore) {
- this[EMIT]('ignoredEntry', entry)
- this[STATE] = entry.remain ? 'ignore' : 'begin'
- } else {
- if (entry.remain)
- this[STATE] = 'body'
- else {
- this[STATE] = 'begin'
- entry.end()
- }
+"use strict";
- if (!this[READENTRY]) {
- this[QUEUE].push(entry)
- this[NEXTENTRY]()
- } else
- this[QUEUE].push(entry)
- }
- }
- }
- }
- }
- [PROCESSENTRY] (entry) {
- let go = true
+const path = __webpack_require__(622)
+const nopt = __webpack_require__(401)
+const log = __webpack_require__(412)
+const childProcess = __webpack_require__(129)
+const EE = __webpack_require__(614).EventEmitter
+const inherits = __webpack_require__(669).inherits
+const commands = [
+ // Module build commands
+ 'build',
+ 'clean',
+ 'configure',
+ 'rebuild',
+ // Development Header File management commands
+ 'install',
+ 'list',
+ 'remove'
+]
+const aliases = {
+ ls: 'list',
+ rm: 'remove'
+}
- if (!entry) {
- this[READENTRY] = null
- go = false
- } else if (Array.isArray(entry))
- this.emit.apply(this, entry)
- else {
- this[READENTRY] = entry
- this.emit('entry', entry)
- if (!entry.emittedEnd) {
- entry.on('end', _ => this[NEXTENTRY]())
- go = false
- }
- }
+// differentiate node-gyp's logs from npm's
+log.heading = 'gyp'
- return go
- }
+function gyp () {
+ return new Gyp()
+}
- [NEXTENTRY] () {
- do {} while (this[PROCESSENTRY](this[QUEUE].shift()))
+function Gyp () {
+ var self = this
- if (!this[QUEUE].length) {
- // At this point, there's nothing in the queue, but we may have an
- // entry which is being consumed (readEntry).
- // If we don't, then we definitely can handle more data.
- // If we do, and either it's flowing, or it has never had any data
- // written to it, then it needs more.
- // The only other possibility is that it has returned false from a
- // write() call, so we wait for the next drain to continue.
- const re = this[READENTRY]
- const drainNow = !re || re.flowing || re.size === re.remain
- if (drainNow) {
- if (!this[WRITING])
- this.emit('drain')
- } else
- re.once('drain', _ => this.emit('drain'))
- }
- }
+ this.devDir = ''
+ this.commands = {}
- [CONSUMEBODY] (chunk, position) {
- // write up to but no more than writeEntry.blockRemain
- const entry = this[WRITEENTRY]
- const br = entry.blockRemain
- const c = (br >= chunk.length && position === 0) ? chunk
- : chunk.slice(position, position + br)
+ commands.forEach(function (command) {
+ self.commands[command] = function (argv, callback) {
+ log.verbose('command', command, argv)
+ return require('./' + command)(self, argv, callback)
+ }
+ })
+}
+inherits(Gyp, EE)
+exports.Gyp = Gyp
+var proto = Gyp.prototype
- entry.write(c)
+/**
+ * Export the contents of the package.json.
+ */
- if (!entry.blockRemain) {
- this[STATE] = 'begin'
- this[WRITEENTRY] = null
- entry.end()
- }
+proto.package = __webpack_require__(387)
- return c.length
- }
+/**
+ * nopt configuration definitions
+ */
- [CONSUMEMETA] (chunk, position) {
- const entry = this[WRITEENTRY]
- const ret = this[CONSUMEBODY](chunk, position)
+proto.configDefs = {
+ help: Boolean, // everywhere
+ arch: String, // 'configure'
+ cafile: String, // 'install'
+ debug: Boolean, // 'build'
+ directory: String, // bin
+ make: String, // 'build'
+ msvs_version: String, // 'configure'
+ ensure: Boolean, // 'install'
+ solution: String, // 'build' (windows only)
+ proxy: String, // 'install'
+ noproxy: String, // 'install'
+ devdir: String, // everywhere
+ nodedir: String, // 'configure'
+ loglevel: String, // everywhere
+ python: String, // 'configure'
+ 'dist-url': String, // 'install'
+ tarball: String, // 'install'
+ jobs: String, // 'build'
+ thin: String // 'configure'
+}
- // if we finished, then the entry is reset
- if (!this[WRITEENTRY])
- this[EMITMETA](entry)
+/**
+ * nopt shorthands
+ */
- return ret
- }
+proto.shorthands = {
+ release: '--no-debug',
+ C: '--directory',
+ debug: '--debug',
+ j: '--jobs',
+ silly: '--loglevel=silly',
+ verbose: '--loglevel=verbose',
+ silent: '--loglevel=silent'
+}
- [EMIT] (ev, data, extra) {
- if (!this[QUEUE].length && !this[READENTRY])
- this.emit(ev, data, extra)
- else
- this[QUEUE].push([ev, data, extra])
- }
+/**
+ * expose the command aliases for the bin file to use.
+ */
- [EMITMETA] (entry) {
- this[EMIT]('meta', this[META])
- switch (entry.type) {
- case 'ExtendedHeader':
- case 'OldExtendedHeader':
- this[EX] = Pax.parse(this[META], this[EX], false)
- break
+proto.aliases = aliases
- case 'GlobalExtendedHeader':
- this[GEX] = Pax.parse(this[META], this[GEX], true)
- break
+/**
+ * Parses the given argv array and sets the 'opts',
+ * 'argv' and 'command' properties.
+ */
- case 'NextFileHasLongPath':
- case 'OldGnuLongPath':
- this[EX] = this[EX] || Object.create(null)
- this[EX].path = this[META].replace(/\0.*/, '')
- break
+proto.parseArgv = function parseOpts (argv) {
+ this.opts = nopt(this.configDefs, this.shorthands, argv)
+ this.argv = this.opts.argv.remain.slice()
- case 'NextFileHasLongLinkpath':
- this[EX] = this[EX] || Object.create(null)
- this[EX].linkpath = this[META].replace(/\0.*/, '')
- break
+ var commands = this.todo = []
- /* istanbul ignore next */
- default: throw new Error('unknown meta: ' + entry.type)
+ // create a copy of the argv array with aliases mapped
+ argv = this.argv.map(function (arg) {
+ // is this an alias?
+ if (arg in this.aliases) {
+ arg = this.aliases[arg]
}
- }
+ return arg
+ }, this)
- abort (msg, error) {
- this[ABORTED] = true
- this.warn(msg, error)
- this.emit('abort', error)
- this.emit('error', error)
+ // process the mapped args into "command" objects ("name" and "args" props)
+ argv.slice().forEach(function (arg) {
+ if (arg in this.commands) {
+ var args = argv.splice(0, argv.indexOf(arg))
+ argv.shift()
+ if (commands.length > 0) {
+ commands[commands.length - 1].args = args
+ }
+ commands.push({ name: arg, args: [] })
+ }
+ }, this)
+ if (commands.length > 0) {
+ commands[commands.length - 1].args = argv.splice(0)
}
- write (chunk) {
- if (this[ABORTED])
+ // support for inheriting config env variables from npm
+ var npmConfigPrefix = 'npm_config_'
+ Object.keys(process.env).forEach(function (name) {
+ if (name.indexOf(npmConfigPrefix) !== 0) {
return
-
- // first write, might be gzipped
- if (this[UNZIP] === null && chunk) {
- if (this[BUFFER]) {
- chunk = Buffer.concat([this[BUFFER], chunk])
- this[BUFFER] = null
- }
- if (chunk.length < gzipHeader.length) {
- this[BUFFER] = chunk
- return true
- }
- for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
- if (chunk[i] !== gzipHeader[i])
- this[UNZIP] = false
- }
- if (this[UNZIP] === null) {
- const ended = this[ENDED]
- this[ENDED] = false
- this[UNZIP] = new zlib.Unzip()
- this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
- this[UNZIP].on('error', er =>
- this.abort(er.message, er))
- this[UNZIP].on('end', _ => {
- this[ENDED] = true
- this[CONSUMECHUNK]()
- })
- this[WRITING] = true
- const ret = this[UNZIP][ended ? 'end' : 'write' ](chunk)
- this[WRITING] = false
- return ret
- }
- }
-
- this[WRITING] = true
- if (this[UNZIP])
- this[UNZIP].write(chunk)
- else
- this[CONSUMECHUNK](chunk)
- this[WRITING] = false
-
- // return false if there's a queue, or if the current entry isn't flowing
- const ret =
- this[QUEUE].length ? false :
- this[READENTRY] ? this[READENTRY].flowing :
- true
-
- // if we have no queue, then that means a clogged READENTRY
- if (!ret && !this[QUEUE].length)
- this[READENTRY].once('drain', _ => this.emit('drain'))
-
- return ret
- }
-
- [BUFFERCONCAT] (c) {
- if (c && !this[ABORTED])
- this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
- }
-
- [MAYBEEND] () {
- if (this[ENDED] &&
- !this[EMITTEDEND] &&
- !this[ABORTED] &&
- !this[CONSUMING]) {
- this[EMITTEDEND] = true
- const entry = this[WRITEENTRY]
- if (entry && entry.blockRemain) {
- const have = this[BUFFER] ? this[BUFFER].length : 0
- this.warn('Truncated input (needed ' + entry.blockRemain +
- ' more bytes, only ' + have + ' available)', entry)
- if (this[BUFFER])
- entry.write(this[BUFFER])
- entry.end()
- }
- this[EMIT](DONE)
}
- }
-
- [CONSUMECHUNK] (chunk) {
- if (this[CONSUMING]) {
- this[BUFFERCONCAT](chunk)
- } else if (!chunk && !this[BUFFER]) {
- this[MAYBEEND]()
+ var val = process.env[name]
+ if (name === npmConfigPrefix + 'loglevel') {
+ log.level = val
} else {
- this[CONSUMING] = true
- if (this[BUFFER]) {
- this[BUFFERCONCAT](chunk)
- const c = this[BUFFER]
- this[BUFFER] = null
- this[CONSUMECHUNKSUB](c)
- } else {
- this[CONSUMECHUNKSUB](chunk)
- }
-
- while (this[BUFFER] && this[BUFFER].length >= 512 && !this[ABORTED]) {
- const c = this[BUFFER]
- this[BUFFER] = null
- this[CONSUMECHUNKSUB](c)
+ // add the user-defined options to the config
+ name = name.substring(npmConfigPrefix.length)
+ // gyp@741b7f1 enters an infinite loop when it encounters
+ // zero-length options so ensure those don't get through.
+ if (name) {
+ this.opts[name] = val
}
- this[CONSUMING] = false
}
+ }, this)
- if (!this[BUFFER] || this[ENDED])
- this[MAYBEEND]()
+ if (this.opts.loglevel) {
+ log.level = this.opts.loglevel
}
+ log.resume()
+}
- [CONSUMECHUNKSUB] (chunk) {
- // we know that we are in CONSUMING mode, so anything written goes into
- // the buffer. Advance the position and put any remainder in the buffer.
- let position = 0
- let length = chunk.length
- while (position + 512 <= length && !this[ABORTED]) {
- switch (this[STATE]) {
- case 'begin':
- this[CONSUMEHEADER](chunk, position)
- position += 512
- break
-
- case 'ignore':
- case 'body':
- position += this[CONSUMEBODY](chunk, position)
- break
-
- case 'meta':
- position += this[CONSUMEMETA](chunk, position)
- break
-
- /* istanbul ignore next */
- default:
- throw new Error('invalid state: ' + this[STATE])
- }
- }
+/**
+ * Spawns a child process and emits a 'spawn' event.
+ */
- if (position < length) {
- if (this[BUFFER])
- this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
- else
- this[BUFFER] = chunk.slice(position)
- }
+proto.spawn = function spawn (command, args, opts) {
+ if (!opts) {
+ opts = {}
}
-
- end (chunk) {
- if (!this[ABORTED]) {
- if (this[UNZIP])
- this[UNZIP].end(chunk)
- else {
- this[ENDED] = true
- this.write(chunk)
- }
- }
+ if (!opts.silent && !opts.stdio) {
+ opts.stdio = [0, 1, 2]
}
-})
+ var cp = childProcess.spawn(command, args, opts)
+ log.info('spawn', command)
+ log.info('spawn args', args)
+ return cp
+}
+/**
+ * Returns the usage instructions for node-gyp.
+ */
-/***/ }),
-/* 204 */,
-/* 205 */,
-/* 206 */
-/***/ (function(__unusedmodule, exports) {
+proto.usage = function usage () {
+ var str = [
+ '',
+ ' Usage: node-gyp [options]',
+ '',
+ ' where is one of:',
+ commands.map(function (c) {
+ return ' - ' + c + ' - ' + require('./' + c).usage
+ }).join('\n'),
+ '',
+ 'node-gyp@' + this.version + ' ' + path.resolve(__dirname, '..'),
+ 'node@' + process.versions.node
+ ].join('\n')
+ return str
+}
-"use strict";
+/**
+ * Version number getter.
+ */
-Object.defineProperty(exports, "__esModule", { value: true });
-const DEFAULT_TIMEOUT = 30000;
-exports.DEFAULT_TIMEOUT = DEFAULT_TIMEOUT;
-// prettier-ignore
-const ERRORS = {
- InvalidSocksCommand: 'An invalid SOCKS command was provided. Valid options are connect, bind, and associate.',
- InvalidSocksCommandForOperation: 'An invalid SOCKS command was provided. Only a subset of commands are supported for this operation.',
- InvalidSocksCommandChain: 'An invalid SOCKS command was provided. Chaining currently only supports the connect command.',
- InvalidSocksClientOptionsDestination: 'An invalid destination host was provided.',
- InvalidSocksClientOptionsExistingSocket: 'An invalid existing socket was provided. This should be an instance of stream.Duplex.',
- InvalidSocksClientOptionsProxy: 'Invalid SOCKS proxy details were provided.',
- InvalidSocksClientOptionsTimeout: 'An invalid timeout value was provided. Please enter a value above 0 (in ms).',
- InvalidSocksClientOptionsProxiesLength: 'At least two socks proxies must be provided for chaining.',
- NegotiationError: 'Negotiation error',
- SocketClosed: 'Socket closed',
- ProxyConnectionTimedOut: 'Proxy connection timed out',
- InternalError: 'SocksClient internal error (this should not happen)',
- InvalidSocks4HandshakeResponse: 'Received invalid Socks4 handshake response',
- Socks4ProxyRejectedConnection: 'Socks4 Proxy rejected connection',
- InvalidSocks4IncomingConnectionResponse: 'Socks4 invalid incoming connection response',
- Socks4ProxyRejectedIncomingBoundConnection: 'Socks4 Proxy rejected incoming bound connection',
- InvalidSocks5InitialHandshakeResponse: 'Received invalid Socks5 initial handshake response',
- InvalidSocks5IntiailHandshakeSocksVersion: 'Received invalid Socks5 initial handshake (invalid socks version)',
- InvalidSocks5InitialHandshakeNoAcceptedAuthType: 'Received invalid Socks5 initial handshake (no accepted authentication type)',
- InvalidSocks5InitialHandshakeUnknownAuthType: 'Received invalid Socks5 initial handshake (unknown authentication type)',
- Socks5AuthenticationFailed: 'Socks5 Authentication failed',
- InvalidSocks5FinalHandshake: 'Received invalid Socks5 final handshake response',
- InvalidSocks5FinalHandshakeRejected: 'Socks5 proxy rejected connection',
- InvalidSocks5IncomingConnectionResponse: 'Received invalid Socks5 incoming connection response',
- Socks5ProxyRejectedIncomingBoundConnection: 'Socks5 Proxy rejected incoming bound connection',
-};
-exports.ERRORS = ERRORS;
-const SOCKS_INCOMING_PACKET_SIZES = {
- Socks5InitialHandshakeResponse: 2,
- Socks5UserPassAuthenticationResponse: 2,
- // Command response + incoming connection (bind)
- Socks5ResponseHeader: 5,
- Socks5ResponseIPv4: 10,
- Socks5ResponseIPv6: 22,
- Socks5ResponseHostname: (hostNameLength) => hostNameLength + 7,
- // Command response + incoming connection (bind)
- Socks4Response: 8 // 2 header + 2 port + 4 ip
-};
-exports.SOCKS_INCOMING_PACKET_SIZES = SOCKS_INCOMING_PACKET_SIZES;
-var SocksCommand;
-(function (SocksCommand) {
- SocksCommand[SocksCommand["connect"] = 1] = "connect";
- SocksCommand[SocksCommand["bind"] = 2] = "bind";
- SocksCommand[SocksCommand["associate"] = 3] = "associate";
-})(SocksCommand || (SocksCommand = {}));
-exports.SocksCommand = SocksCommand;
-var Socks4Response;
-(function (Socks4Response) {
- Socks4Response[Socks4Response["Granted"] = 90] = "Granted";
- Socks4Response[Socks4Response["Failed"] = 91] = "Failed";
- Socks4Response[Socks4Response["Rejected"] = 92] = "Rejected";
- Socks4Response[Socks4Response["RejectedIdent"] = 93] = "RejectedIdent";
-})(Socks4Response || (Socks4Response = {}));
-exports.Socks4Response = Socks4Response;
-var Socks5Auth;
-(function (Socks5Auth) {
- Socks5Auth[Socks5Auth["NoAuth"] = 0] = "NoAuth";
- Socks5Auth[Socks5Auth["GSSApi"] = 1] = "GSSApi";
- Socks5Auth[Socks5Auth["UserPass"] = 2] = "UserPass";
-})(Socks5Auth || (Socks5Auth = {}));
-exports.Socks5Auth = Socks5Auth;
-var Socks5Response;
-(function (Socks5Response) {
- Socks5Response[Socks5Response["Granted"] = 0] = "Granted";
- Socks5Response[Socks5Response["Failure"] = 1] = "Failure";
- Socks5Response[Socks5Response["NotAllowed"] = 2] = "NotAllowed";
- Socks5Response[Socks5Response["NetworkUnreachable"] = 3] = "NetworkUnreachable";
- Socks5Response[Socks5Response["HostUnreachable"] = 4] = "HostUnreachable";
- Socks5Response[Socks5Response["ConnectionRefused"] = 5] = "ConnectionRefused";
- Socks5Response[Socks5Response["TTLExpired"] = 6] = "TTLExpired";
- Socks5Response[Socks5Response["CommandNotSupported"] = 7] = "CommandNotSupported";
- Socks5Response[Socks5Response["AddressNotSupported"] = 8] = "AddressNotSupported";
-})(Socks5Response || (Socks5Response = {}));
-exports.Socks5Response = Socks5Response;
-var Socks5HostType;
-(function (Socks5HostType) {
- Socks5HostType[Socks5HostType["IPv4"] = 1] = "IPv4";
- Socks5HostType[Socks5HostType["Hostname"] = 3] = "Hostname";
- Socks5HostType[Socks5HostType["IPv6"] = 4] = "IPv6";
-})(Socks5HostType || (Socks5HostType = {}));
-exports.Socks5HostType = Socks5HostType;
-var SocksClientState;
-(function (SocksClientState) {
- SocksClientState[SocksClientState["Created"] = 0] = "Created";
- SocksClientState[SocksClientState["Connecting"] = 1] = "Connecting";
- SocksClientState[SocksClientState["Connected"] = 2] = "Connected";
- SocksClientState[SocksClientState["SentInitialHandshake"] = 3] = "SentInitialHandshake";
- SocksClientState[SocksClientState["ReceivedInitialHandshakeResponse"] = 4] = "ReceivedInitialHandshakeResponse";
- SocksClientState[SocksClientState["SentAuthentication"] = 5] = "SentAuthentication";
- SocksClientState[SocksClientState["ReceivedAuthenticationResponse"] = 6] = "ReceivedAuthenticationResponse";
- SocksClientState[SocksClientState["SentFinalHandshake"] = 7] = "SentFinalHandshake";
- SocksClientState[SocksClientState["ReceivedFinalResponse"] = 8] = "ReceivedFinalResponse";
- SocksClientState[SocksClientState["BoundWaitingForConnection"] = 9] = "BoundWaitingForConnection";
- SocksClientState[SocksClientState["Established"] = 10] = "Established";
- SocksClientState[SocksClientState["Disconnected"] = 11] = "Disconnected";
- SocksClientState[SocksClientState["Error"] = 99] = "Error";
-})(SocksClientState || (SocksClientState = {}));
-exports.SocksClientState = SocksClientState;
-//# sourceMappingURL=constants.js.map
+Object.defineProperty(proto, 'version', {
+ get: function () {
+ return this.package.version
+ },
+ enumerable: true
+})
-/***/ }),
-/* 207 */,
-/* 208 */,
-/* 209 */,
-/* 210 */
-/***/ (function(__unusedmodule, exports) {
+module.exports = exports = gyp
-// Generated by CoffeeScript 1.12.7
-(function() {
- "use strict";
- exports.stripBOM = function(str) {
- if (str[0] === '\uFEFF') {
- return str.substring(1);
- } else {
- return str;
- }
- };
-}).call(this);
+/***/ }),
+/* 128 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+"use strict";
-/***/ }),
-/* 211 */
-/***/ (function(module) {
-module.exports = require("https");
+module.exports = spawn
-/***/ }),
-/* 212 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+const _spawn = __webpack_require__(129).spawn
+const EventEmitter = __webpack_require__(614).EventEmitter
-"use strict";
+let progressEnabled
+let running = 0
+function startRunning (log) {
+ if (progressEnabled == null) progressEnabled = log.progressEnabled
+ if (progressEnabled) log.disableProgress()
+ ++running
+}
-const BB = __webpack_require__(489)
+function stopRunning (log) {
+ --running
+ if (progressEnabled && running === 0) log.enableProgress()
+}
-const cacache = __webpack_require__(426)
-const Fetcher = __webpack_require__(404)
-const fs = __webpack_require__(747)
-const pipe = BB.promisify(__webpack_require__(371).pipe)
-const through = __webpack_require__(371).through
+function willCmdOutput (stdio) {
+ if (stdio === 'inherit') return true
+ if (!Array.isArray(stdio)) return false
+ for (let fh = 1; fh <= 2; ++fh) {
+ if (stdio[fh] === 'inherit') return true
+ if (stdio[fh] === 1 || stdio[fh] === 2) return true
+ }
+ return false
+}
-const readFileAsync = BB.promisify(fs.readFile)
-const statAsync = BB.promisify(fs.stat)
+function spawn (cmd, args, options, log) {
+ const cmdWillOutput = willCmdOutput(options && options.stdio)
-const MAX_BULK_SIZE = 2 * 1024 * 1024 // 2MB
+ if (cmdWillOutput) startRunning(log)
+ const raw = _spawn(cmd, args, options)
+ const cooked = new EventEmitter()
-// `file` packages refer to local tarball files.
-const fetchFile = module.exports = Object.create(null)
+ raw.on('error', function (er) {
+ if (cmdWillOutput) stopRunning(log)
+ er.file = cmd
+ cooked.emit('error', er)
+ }).on('close', function (code, signal) {
+ if (cmdWillOutput) stopRunning(log)
+ // Create ENOENT error because Node.js v8.0 will not emit
+ // an `error` event if the command could not be found.
+ if (code === 127) {
+ const er = new Error('spawn ENOENT')
+ er.code = 'ENOENT'
+ er.errno = 'ENOENT'
+ er.syscall = 'spawn'
+ er.file = cmd
+ cooked.emit('error', er)
+ } else {
+ cooked.emit('close', code, signal)
+ }
+ })
-Fetcher.impl(fetchFile, {
- packument (spec, opts) {
- return BB.reject(new Error('Not implemented yet'))
- },
+ cooked.stdin = raw.stdin
+ cooked.stdout = raw.stdout
+ cooked.stderr = raw.stderr
+ cooked.kill = function (sig) { return raw.kill(sig) }
- manifest (spec, opts) {
- // We can't do much here. `finalizeManifest` will take care of
- // calling `tarball` to fill out all the necessary details.
- return BB.resolve(null)
- },
+ return cooked
+}
- // All the heavy lifting for `file` packages is done here.
- // They're never cached. We just read straight out of the file.
- // TODO - maybe they *should* be cached?
- tarball (spec, opts) {
- const src = spec._resolved || spec.fetchSpec
- const stream = through()
- statAsync(src).then(stat => {
- if (spec._resolved) { stream.emit('manifest', spec) }
- if (stat.size <= MAX_BULK_SIZE) {
- // YAY LET'S DO THING IN BULK
- return readFileAsync(src).then(data => {
- if (opts.cache) {
- return cacache.put(
- opts.cache, `pacote:tarball:file:${src}`, data, {
- integrity: opts.integrity
- }
- ).then(integrity => ({ data, integrity }))
- } else {
- return { data }
- }
- }).then(info => {
- if (info.integrity) { stream.emit('integrity', info.integrity) }
- stream.write(info.data, () => {
- stream.end()
- })
- })
- } else {
- let integrity
- const cacheWriter = !opts.cache
- ? BB.resolve(null)
- : (pipe(
- fs.createReadStream(src),
- cacache.put.stream(opts.cache, `pacote:tarball:${src}`, {
- integrity: opts.integrity
- }).on('integrity', d => { integrity = d })
- ))
- return cacheWriter.then(() => {
- if (integrity) { stream.emit('integrity', integrity) }
- return pipe(fs.createReadStream(src), stream)
- })
- }
- }).catch(err => stream.emit('error', err))
- return stream
- },
- fromManifest (manifest, spec, opts) {
- return this.tarball(manifest || spec, opts)
- }
-})
+/***/ }),
+/* 129 */
+/***/ (function(module) {
+module.exports = require("child_process");
/***/ }),
-/* 213 */
+/* 130 */,
+/* 131 */,
+/* 132 */
/***/ (function(module) {
-module.exports = require("punycode");
+module.exports = {"repositories":"'repositories' (plural) Not supported. Please pick one as the 'repository' field","missingRepository":"No repository field.","brokenGitUrl":"Probably broken git url: %s","nonObjectScripts":"scripts must be an object","nonStringScript":"script values must be string commands","nonArrayFiles":"Invalid 'files' member","invalidFilename":"Invalid filename in 'files' list: %s","nonArrayBundleDependencies":"Invalid 'bundleDependencies' list. Must be array of package names","nonStringBundleDependency":"Invalid bundleDependencies member: %s","nonDependencyBundleDependency":"Non-dependency in bundleDependencies: %s","nonObjectDependencies":"%s field must be an object","nonStringDependency":"Invalid dependency: %s %s","deprecatedArrayDependencies":"specifying %s as array is deprecated","deprecatedModules":"modules field is deprecated","nonArrayKeywords":"keywords should be an array of strings","nonStringKeyword":"keywords should be an array of strings","conflictingName":"%s is also the name of a node core module.","nonStringDescription":"'description' field should be a string","missingDescription":"No description","missingReadme":"No README data","missingLicense":"No license field.","nonEmailUrlBugsString":"Bug string field must be url, email, or {email,url}","nonUrlBugsUrlField":"bugs.url field must be a string url. Deleted.","nonEmailBugsEmailField":"bugs.email field must be a string email. Deleted.","emptyNormalizedBugs":"Normalized value of bugs field is an empty object. Deleted.","nonUrlHomepage":"homepage field must be a string url. Deleted.","invalidLicense":"license should be a valid SPDX license expression","typo":"%s should probably be %s."};
/***/ }),
-/* 214 */,
-/* 215 */
+/* 133 */
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-/* eslint-disable node/no-deprecated-api */
-
-var buffer = __webpack_require__(293)
-var Buffer = buffer.Buffer
-
-var safer = {}
+const BB = __webpack_require__(900)
-var key
+const chownr = BB.promisify(__webpack_require__(941))
+const mkdirp = BB.promisify(__webpack_require__(626))
+const inflight = __webpack_require__(399)
+const inferOwner = __webpack_require__(686)
-for (key in buffer) {
- if (!buffer.hasOwnProperty(key)) continue
- if (key === 'SlowBuffer' || key === 'Buffer') continue
- safer[key] = buffer[key]
+// Memoize getuid()/getgid() calls.
+// patch process.setuid/setgid to invalidate cached value on change
+const self = { uid: null, gid: null }
+const getSelf = () => {
+ if (typeof self.uid !== 'number') {
+ self.uid = process.getuid()
+ const setuid = process.setuid
+ process.setuid = (uid) => {
+ self.uid = null
+ process.setuid = setuid
+ return process.setuid(uid)
+ }
+ }
+ if (typeof self.gid !== 'number') {
+ self.gid = process.getgid()
+ const setgid = process.setgid
+ process.setgid = (gid) => {
+ self.gid = null
+ process.setgid = setgid
+ return process.setgid(gid)
+ }
+ }
}
-var Safer = safer.Buffer = {}
-for (key in Buffer) {
- if (!Buffer.hasOwnProperty(key)) continue
- if (key === 'allocUnsafe' || key === 'allocUnsafeSlow') continue
- Safer[key] = Buffer[key]
-}
+module.exports.chownr = fixOwner
+function fixOwner (cache, filepath) {
+ if (!process.getuid) {
+ // This platform doesn't need ownership fixing
+ return BB.resolve()
+ }
-safer.Buffer.prototype = Buffer.prototype
+ getSelf()
+ if (self.uid !== 0) {
+ // almost certainly can't chown anyway
+ return BB.resolve()
+ }
-if (!Safer.from || Safer.from === Uint8Array.from) {
- Safer.from = function (value, encodingOrOffset, length) {
- if (typeof value === 'number') {
- throw new TypeError('The "value" argument must not be of type number. Received type ' + typeof value)
- }
- if (value && typeof value.length === 'undefined') {
- throw new TypeError('The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type ' + typeof value)
+ return BB.resolve(inferOwner(cache)).then(owner => {
+ const { uid, gid } = owner
+
+ // No need to override if it's already what we used.
+ if (self.uid === uid && self.gid === gid) {
+ return
}
- return Buffer(value, encodingOrOffset, length)
- }
+
+ return inflight(
+ 'fixOwner: fixing ownership on ' + filepath,
+ () => chownr(
+ filepath,
+ typeof uid === 'number' ? uid : self.uid,
+ typeof gid === 'number' ? gid : self.gid
+ ).catch({ code: 'ENOENT' }, () => null)
+ )
+ })
}
-if (!Safer.alloc) {
- Safer.alloc = function (size, fill, encoding) {
- if (typeof size !== 'number') {
- throw new TypeError('The "size" argument must be of type number. Received type ' + typeof size)
- }
- if (size < 0 || size >= 2 * (1 << 30)) {
- throw new RangeError('The value "' + size + '" is invalid for option "size"')
- }
- var buf = Buffer(size)
- if (!fill || fill.length === 0) {
- buf.fill(0)
- } else if (typeof encoding === 'string') {
- buf.fill(fill, encoding)
- } else {
- buf.fill(fill)
+module.exports.chownr.sync = fixOwnerSync
+function fixOwnerSync (cache, filepath) {
+ if (!process.getuid) {
+ // This platform doesn't need ownership fixing
+ return
+ }
+ const { uid, gid } = inferOwner.sync(cache)
+ getSelf()
+ if (self.uid === uid && self.gid === gid) {
+ // No need to override if it's already what we used.
+ return
+ }
+ try {
+ chownr.sync(
+ filepath,
+ typeof uid === 'number' ? uid : self.uid,
+ typeof gid === 'number' ? gid : self.gid
+ )
+ } catch (err) {
+ // only catch ENOENT, any other error is a problem.
+ if (err.code === 'ENOENT') {
+ return null
}
- return buf
+ throw err
}
}
-if (!safer.kStringMaxLength) {
- try {
- safer.kStringMaxLength = process.binding('buffer').kStringMaxLength
- } catch (e) {
- // we can't determine kStringMaxLength in environments where process.binding
- // is unsupported, so let's not set it
- }
+module.exports.mkdirfix = mkdirfix
+function mkdirfix (cache, p, cb) {
+ // we have to infer the owner _before_ making the directory, even though
+ // we aren't going to use the results, since the cache itself might not
+ // exist yet. If we mkdirp it, then our current uid/gid will be assumed
+ // to be correct if it creates the cache folder in the process.
+ return BB.resolve(inferOwner(cache)).then(() => {
+ return mkdirp(p).then(made => {
+ if (made) {
+ return fixOwner(cache, made).then(() => made)
+ }
+ }).catch({ code: 'EEXIST' }, () => {
+ // There's a race in mkdirp!
+ return fixOwner(cache, p).then(() => null)
+ })
+ })
}
-if (!safer.constants) {
- safer.constants = {
- MAX_LENGTH: safer.kMaxLength
- }
- if (safer.kStringMaxLength) {
- safer.constants.MAX_STRING_LENGTH = safer.kStringMaxLength
+module.exports.mkdirfix.sync = mkdirfixSync
+function mkdirfixSync (cache, p) {
+ try {
+ inferOwner.sync(cache)
+ const made = mkdirp.sync(p)
+ if (made) {
+ fixOwnerSync(cache, made)
+ return made
+ }
+ } catch (err) {
+ if (err.code === 'EEXIST') {
+ fixOwnerSync(cache, p)
+ return null
+ } else {
+ throw err
+ }
}
}
-module.exports = safer
-
/***/ }),
-/* 216 */,
-/* 217 */,
-/* 218 */
+/* 134 */
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-module.exports = __webpack_require__(489).promisify(__webpack_require__(687))
+const BB = __webpack_require__(900)
+
+const fetch = __webpack_require__(789)
+const manifest = __webpack_require__(935)
+const optCheck = __webpack_require__(420)
+const PassThrough = __webpack_require__(794).PassThrough
+const ssri = __webpack_require__(951)
+const url = __webpack_require__(835)
+
+module.exports = tarball
+function tarball (spec, opts) {
+ opts = optCheck(opts)
+ const registry = fetch.pickRegistry(spec, opts)
+ const stream = new PassThrough()
+ let mani
+ if (
+ opts.resolved &&
+ // spec.type === 'version' &&
+ opts.resolved.indexOf(registry) === 0
+ ) {
+ // fakeChild is a shortcut to avoid looking up a manifest!
+ mani = BB.resolve({
+ name: spec.name,
+ version: spec.fetchSpec,
+ _integrity: opts.integrity,
+ _resolved: opts.resolved,
+ _fakeChild: true
+ })
+ } else {
+ // We can't trust opts.resolved if it's going to a separate host.
+ mani = manifest(spec, opts)
+ }
+
+ mani.then(mani => {
+ !mani._fakeChild && stream.emit('manifest', mani)
+ const fetchStream = fromManifest(mani, spec, opts).on(
+ 'integrity', i => stream.emit('integrity', i)
+ )
+ fetchStream.on('error', err => stream.emit('error', err))
+ fetchStream.pipe(stream)
+ return null
+ }).catch(err => stream.emit('error', err))
+ return stream
+}
+
+module.exports.fromManifest = fromManifest
+function fromManifest (manifest, spec, opts) {
+ opts = optCheck(opts)
+ if (spec.scope) { opts = opts.concat({ scope: spec.scope }) }
+ const stream = new PassThrough()
+ const registry = fetch.pickRegistry(spec, opts)
+ const uri = getTarballUrl(spec, registry, manifest, opts)
+ fetch(uri, opts.concat({
+ headers: {
+ 'pacote-req-type': 'tarball',
+ 'pacote-pkg-id': `registry:${manifest.name}@${uri}`
+ },
+ integrity: manifest._integrity,
+ algorithms: [
+ manifest._integrity
+ ? ssri.parse(manifest._integrity).pickAlgorithm()
+ : 'sha1'
+ ],
+ spec
+ }, opts))
+ .then(res => {
+ const hash = res.headers.get('x-local-cache-hash')
+ if (hash) {
+ stream.emit('integrity', decodeURIComponent(hash))
+ }
+ res.body.on('error', err => stream.emit('error', err))
+ res.body.pipe(stream)
+ return null
+ })
+ .catch(err => stream.emit('error', err))
+ return stream
+}
+
+function getTarballUrl (spec, registry, mani, opts) {
+ const reg = url.parse(registry)
+ const tarball = url.parse(mani._resolved)
+ // https://github.com/npm/npm/pull/9471
+ //
+ // TL;DR: Some alternative registries host tarballs on http and packuments
+ // on https, and vice-versa. There's also a case where people who can't use
+ // SSL to access the npm registry, for example, might use
+ // `--registry=http://registry.npmjs.org/`. In this case, we need to
+ // rewrite `tarball` to match the protocol.
+ //
+ if (reg.hostname === tarball.hostname && reg.protocol !== tarball.protocol) {
+ tarball.protocol = reg.protocol
+ // Ports might be same host different protocol!
+ if (reg.port !== tarball.port) {
+ delete tarball.host
+ tarball.port = reg.port
+ }
+ delete tarball.href
+ }
+ return url.format(tarball)
+}
/***/ }),
-/* 219 */,
-/* 220 */
-/***/ (function(__unusedmodule, exports) {
+/* 135 */
+/***/ (function(module) {
"use strict";
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-//# sourceMappingURL=SpanOptions.js.map
+module.exports = function (Yallist) {
+ Yallist.prototype[Symbol.iterator] = function* () {
+ for (let walker = this.head; walker; walker = walker.next) {
+ yield walker.value
+ }
+ }
+}
+
/***/ }),
-/* 221 */,
-/* 222 */,
-/* 223 */,
-/* 224 */
+/* 136 */
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
@@ -17062,18992 +13569,19173 @@ Object.defineProperty(exports, "__esModule", { value: true });
* limitations under the License.
*/
Object.defineProperty(exports, "__esModule", { value: true });
-exports.NOOP_TRACER_PROVIDER = exports.NoopTracerProvider = void 0;
-var NoopTracer_1 = __webpack_require__(151);
+exports.MetricsAPI = void 0;
+var NoopMeterProvider_1 = __webpack_require__(450);
+var global_utils_1 = __webpack_require__(976);
/**
- * An implementation of the {@link TracerProvider} which returns an impotent
- * Tracer for all calls to `getTracer`.
- *
- * All operations are no-op.
+ * Singleton object which represents the entry point to the OpenTelemetry Metrics API
*/
-var NoopTracerProvider = /** @class */ (function () {
- function NoopTracerProvider() {
+var MetricsAPI = /** @class */ (function () {
+ /** Empty private constructor prevents end users from constructing a new instance of the API */
+ function MetricsAPI() {
}
- NoopTracerProvider.prototype.getTracer = function (_name, _version) {
- return NoopTracer_1.NOOP_TRACER;
+ /** Get the singleton instance of the Metrics API */
+ MetricsAPI.getInstance = function () {
+ if (!this._instance) {
+ this._instance = new MetricsAPI();
+ }
+ return this._instance;
};
- return NoopTracerProvider;
+ /**
+ * Set the current global meter. Returns the initialized global meter provider.
+ */
+ MetricsAPI.prototype.setGlobalMeterProvider = function (provider) {
+ if (global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY]) {
+ // global meter provider has already been set
+ return this.getMeterProvider();
+ }
+ global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY] = global_utils_1.makeGetter(global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION, provider, NoopMeterProvider_1.NOOP_METER_PROVIDER);
+ return provider;
+ };
+ /**
+ * Returns the global meter provider.
+ */
+ MetricsAPI.prototype.getMeterProvider = function () {
+ var _a, _b;
+ return ((_b = (_a = global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY]) === null || _a === void 0 ? void 0 : _a.call(global_utils_1._global, global_utils_1.API_BACKWARDS_COMPATIBILITY_VERSION)) !== null && _b !== void 0 ? _b : NoopMeterProvider_1.NOOP_METER_PROVIDER);
+ };
+ /**
+ * Returns a meter from the global meter provider.
+ */
+ MetricsAPI.prototype.getMeter = function (name, version) {
+ return this.getMeterProvider().getMeter(name, version);
+ };
+ /** Remove the global meter provider */
+ MetricsAPI.prototype.disable = function () {
+ delete global_utils_1._global[global_utils_1.GLOBAL_METRICS_API_KEY];
+ };
+ return MetricsAPI;
}());
-exports.NoopTracerProvider = NoopTracerProvider;
-exports.NOOP_TRACER_PROVIDER = new NoopTracerProvider();
-//# sourceMappingURL=NoopTracerProvider.js.map
+exports.MetricsAPI = MetricsAPI;
+//# sourceMappingURL=metrics.js.map
/***/ }),
-/* 225 */,
-/* 226 */
+/* 137 */
/***/ (function(module, __unusedexports, __webpack_require__) {
-"use strict";
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-
-
-/**/
-
-var pna = __webpack_require__(822);
-/**/
-
-module.exports = Readable;
+var eos = __webpack_require__(3)
+var shift = __webpack_require__(475)
-/**/
-var isArray = __webpack_require__(262);
-/**/
+module.exports = each
-/**/
-var Duplex;
-/**/
+function each (stream, fn, cb) {
+ var want = true
+ var error = null
+ var ended = false
+ var running = false
+ var calling = false
-Readable.ReadableState = ReadableState;
+ stream.on('readable', onreadable)
+ onreadable()
-/**/
-var EE = __webpack_require__(614).EventEmitter;
+ if (cb) eos(stream, {readable: true, writable: false}, done)
+ return stream
-var EElistenerCount = function (emitter, type) {
- return emitter.listeners(type).length;
-};
-/**/
+ function done (err) {
+ if (!error) error = err
+ ended = true
+ if (!running) cb(error)
+ }
-/**/
-var Stream = __webpack_require__(427);
-/**/
+ function onreadable () {
+ if (want) read()
+ }
-/**/
+ function afterRead (err) {
+ running = false
-var Buffer = __webpack_require__(254).Buffer;
-var OurUint8Array = global.Uint8Array || function () {};
-function _uint8ArrayToBuffer(chunk) {
- return Buffer.from(chunk);
-}
-function _isUint8Array(obj) {
- return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
-}
+ if (err) {
+ error = err
+ if (ended) return cb(error)
+ stream.destroy(err)
+ return
+ }
+ if (ended) return cb(error)
+ if (!calling) read()
+ }
-/**/
+ function read () {
+ while (!running && !ended) {
+ want = false
-/**/
-var util = Object.create(__webpack_require__(286));
-util.inherits = __webpack_require__(689);
-/**/
+ var data = shift(stream)
+ if (ended) return
+ if (data === null) {
+ want = true
+ return
+ }
-/**/
-var debugUtil = __webpack_require__(669);
-var debug = void 0;
-if (debugUtil && debugUtil.debuglog) {
- debug = debugUtil.debuglog('stream');
-} else {
- debug = function () {};
+ running = true
+ calling = true
+ fn(data, afterRead)
+ calling = false
+ }
+ }
}
-/**/
-var BufferList = __webpack_require__(76);
-var destroyImpl = __webpack_require__(232);
-var StringDecoder;
-util.inherits(Readable, Stream);
+/***/ }),
+/* 138 */,
+/* 139 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
+// Unique ID creation requires a high quality random # generator. In node.js
+// this is pretty straight-forward - we use the crypto API.
-function prependListener(emitter, event, fn) {
- // Sadly this is not cacheable as some libraries bundle their own
- // event emitter implementation with them.
- if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);
+var crypto = __webpack_require__(417);
- // This is a hack to make sure that our error handler is attached before any
- // userland ones. NEVER DO THIS. This is here only because this code needs
- // to continue to work with older versions of Node.js that do not include
- // the prependListener() method. The goal is to eventually remove this hack.
- if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
-}
+module.exports = function nodeRNG() {
+ return crypto.randomBytes(16);
+};
-function ReadableState(options, stream) {
- Duplex = Duplex || __webpack_require__(907);
- options = options || {};
+/***/ }),
+/* 140 */
+/***/ (function(module) {
- // Duplex streams are both readable and writable, but share
- // the same options object.
- // However, some cases require setting options to different
- // values for the readable and the writable sides of the duplex stream.
- // These options can be provided separately as readableXXX and writableXXX.
- var isDuplex = stream instanceof Duplex;
+"use strict";
- // object stream flag. Used to make read(n) ignore n and to
- // make all the buffer merging and length checks go away
- this.objectMode = !!options.objectMode;
- if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
+function isArguments (thingy) {
+ return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee')
+}
- // the point at which it stops calling _read() to fill the buffer
- // Note: 0 is a valid value, means "don't call _read preemptively ever"
- var hwm = options.highWaterMark;
- var readableHwm = options.readableHighWaterMark;
- var defaultHwm = this.objectMode ? 16 : 16 * 1024;
+var types = {
+ '*': {label: 'any', check: function () { return true }},
+ A: {label: 'array', check: function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }},
+ S: {label: 'string', check: function (thingy) { return typeof thingy === 'string' }},
+ N: {label: 'number', check: function (thingy) { return typeof thingy === 'number' }},
+ F: {label: 'function', check: function (thingy) { return typeof thingy === 'function' }},
+ O: {label: 'object', check: function (thingy) { return typeof thingy === 'object' && thingy != null && !types.A.check(thingy) && !types.E.check(thingy) }},
+ B: {label: 'boolean', check: function (thingy) { return typeof thingy === 'boolean' }},
+ E: {label: 'error', check: function (thingy) { return thingy instanceof Error }},
+ Z: {label: 'null', check: function (thingy) { return thingy == null }}
+}
- if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm;
+function addSchema (schema, arity) {
+ var group = arity[schema.length] = arity[schema.length] || []
+ if (group.indexOf(schema) === -1) group.push(schema)
+}
- // cast to ints.
- this.highWaterMark = Math.floor(this.highWaterMark);
+var validate = module.exports = function (rawSchemas, args) {
+ if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length)
+ if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas')
+ if (!args) throw missingRequiredArg(1, 'args')
+ if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas)
+ if (!types.A.check(args)) throw invalidType(1, ['array'], args)
+ var schemas = rawSchemas.split('|')
+ var arity = {}
- // A linked list is used to store data chunks instead of an array because the
- // linked list can remove elements from the beginning faster than
- // array.shift()
- this.buffer = new BufferList();
- this.length = 0;
- this.pipes = null;
- this.pipesCount = 0;
- this.flowing = null;
- this.ended = false;
- this.endEmitted = false;
- this.reading = false;
+ schemas.forEach(function (schema) {
+ for (var ii = 0; ii < schema.length; ++ii) {
+ var type = schema[ii]
+ if (!types[type]) throw unknownType(ii, type)
+ }
+ if (/E.*E/.test(schema)) throw moreThanOneError(schema)
+ addSchema(schema, arity)
+ if (/E/.test(schema)) {
+ addSchema(schema.replace(/E.*$/, 'E'), arity)
+ addSchema(schema.replace(/E/, 'Z'), arity)
+ if (schema.length === 1) addSchema('', arity)
+ }
+ })
+ var matching = arity[args.length]
+ if (!matching) {
+ throw wrongNumberOfArgs(Object.keys(arity), args.length)
+ }
+ for (var ii = 0; ii < args.length; ++ii) {
+ var newMatching = matching.filter(function (schema) {
+ var type = schema[ii]
+ var typeCheck = types[type].check
+ return typeCheck(args[ii])
+ })
+ if (!newMatching.length) {
+ var labels = matching.map(function (schema) {
+ return types[schema[ii]].label
+ }).filter(function (schema) { return schema != null })
+ throw invalidType(ii, labels, args[ii])
+ }
+ matching = newMatching
+ }
+}
- // a flag to be able to tell if the event 'readable'/'data' is emitted
- // immediately, or on a later tick. We set this to true at first, because
- // any actions that shouldn't happen until "later" should generally also
- // not happen before the first read call.
- this.sync = true;
+function missingRequiredArg (num) {
+ return newException('EMISSINGARG', 'Missing required argument #' + (num + 1))
+}
- // whenever we return null, then we set a flag to say
- // that we're awaiting a 'readable' event emission.
- this.needReadable = false;
- this.emittedReadable = false;
- this.readableListening = false;
- this.resumeScheduled = false;
+function unknownType (num, type) {
+ return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1))
+}
- // has it been destroyed
- this.destroyed = false;
+function invalidType (num, expectedTypes, value) {
+ var valueType
+ Object.keys(types).forEach(function (typeCode) {
+ if (types[typeCode].check(value)) valueType = types[typeCode].label
+ })
+ return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' +
+ englishList(expectedTypes) + ' but got ' + valueType)
+}
- // Crypto is kind of old and crusty. Historically, its default string
- // encoding is 'binary' so we have to make this configurable.
- // Everything else in the universe uses 'utf8', though.
- this.defaultEncoding = options.defaultEncoding || 'utf8';
+function englishList (list) {
+ return list.join(', ').replace(/, ([^,]+)$/, ' or $1')
+}
- // the number of writers that are awaiting a drain event in .pipe()s
- this.awaitDrain = 0;
+function wrongNumberOfArgs (expected, got) {
+ var english = englishList(expected)
+ var args = expected.every(function (ex) { return ex.length === 1 })
+ ? 'argument'
+ : 'arguments'
+ return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got)
+}
- // if true, a maybeReadMore has been scheduled
- this.readingMore = false;
+function moreThanOneError (schema) {
+ return newException('ETOOMANYERRORTYPES',
+ 'Only one error type per argument signature is allowed, more than one found in "' + schema + '"')
+}
- this.decoder = null;
- this.encoding = null;
- if (options.encoding) {
- if (!StringDecoder) StringDecoder = __webpack_require__(432).StringDecoder;
- this.decoder = new StringDecoder(options.encoding);
- this.encoding = options.encoding;
- }
+function newException (code, msg) {
+ var e = new Error(msg)
+ e.code = code
+ if (Error.captureStackTrace) Error.captureStackTrace(e, validate)
+ return e
}
-function Readable(options) {
- Duplex = Duplex || __webpack_require__(907);
- if (!(this instanceof Readable)) return new Readable(options);
+/***/ }),
+/* 141 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
- this._readableState = new ReadableState(options, this);
+"use strict";
- // legacy
- this.readable = true;
- if (options) {
- if (typeof options.read === 'function') this._read = options.read;
+var net = __webpack_require__(631);
+var tls = __webpack_require__(16);
+var http = __webpack_require__(605);
+var https = __webpack_require__(211);
+var events = __webpack_require__(614);
+var assert = __webpack_require__(357);
+var util = __webpack_require__(669);
- if (typeof options.destroy === 'function') this._destroy = options.destroy;
- }
- Stream.call(this);
+exports.httpOverHttp = httpOverHttp;
+exports.httpsOverHttp = httpsOverHttp;
+exports.httpOverHttps = httpOverHttps;
+exports.httpsOverHttps = httpsOverHttps;
+
+
+function httpOverHttp(options) {
+ var agent = new TunnelingAgent(options);
+ agent.request = http.request;
+ return agent;
}
-Object.defineProperty(Readable.prototype, 'destroyed', {
- get: function () {
- if (this._readableState === undefined) {
- return false;
- }
- return this._readableState.destroyed;
- },
- set: function (value) {
- // we ignore the value if the stream
- // has not been initialized yet
- if (!this._readableState) {
- return;
- }
+function httpsOverHttp(options) {
+ var agent = new TunnelingAgent(options);
+ agent.request = http.request;
+ agent.createSocket = createSecureSocket;
+ agent.defaultPort = 443;
+ return agent;
+}
- // backward compatibility, the user is explicitly
- // managing destroyed
- this._readableState.destroyed = value;
- }
-});
+function httpOverHttps(options) {
+ var agent = new TunnelingAgent(options);
+ agent.request = https.request;
+ return agent;
+}
-Readable.prototype.destroy = destroyImpl.destroy;
-Readable.prototype._undestroy = destroyImpl.undestroy;
-Readable.prototype._destroy = function (err, cb) {
- this.push(null);
- cb(err);
-};
+function httpsOverHttps(options) {
+ var agent = new TunnelingAgent(options);
+ agent.request = https.request;
+ agent.createSocket = createSecureSocket;
+ agent.defaultPort = 443;
+ return agent;
+}
-// Manually shove something into the read() buffer.
-// This returns true if the highWaterMark has not been hit yet,
-// similar to how Writable.write() returns true if you should
-// write() some more.
-Readable.prototype.push = function (chunk, encoding) {
- var state = this._readableState;
- var skipChunkCheck;
- if (!state.objectMode) {
- if (typeof chunk === 'string') {
- encoding = encoding || state.defaultEncoding;
- if (encoding !== state.encoding) {
- chunk = Buffer.from(chunk, encoding);
- encoding = '';
+function TunnelingAgent(options) {
+ var self = this;
+ self.options = options || {};
+ self.proxyOptions = self.options.proxy || {};
+ self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
+ self.requests = [];
+ self.sockets = [];
+
+ self.on('free', function onFree(socket, host, port, localAddress) {
+ var options = toOptions(host, port, localAddress);
+ for (var i = 0, len = self.requests.length; i < len; ++i) {
+ var pending = self.requests[i];
+ if (pending.host === options.host && pending.port === options.port) {
+ // Detect the request to connect same origin server,
+ // reuse the connection.
+ self.requests.splice(i, 1);
+ pending.request.onSocket(socket);
+ return;
}
- skipChunkCheck = true;
}
- } else {
- skipChunkCheck = true;
+ socket.destroy();
+ self.removeSocket(socket);
+ });
+}
+util.inherits(TunnelingAgent, events.EventEmitter);
+
+TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
+ var self = this;
+ var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));
+
+ if (self.sockets.length >= this.maxSockets) {
+ // We are over limit so we'll add it to the queue.
+ self.requests.push(options);
+ return;
}
- return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
-};
+ // If we are under maxSockets create a new one.
+ self.createSocket(options, function(socket) {
+ socket.on('free', onFree);
+ socket.on('close', onCloseOrRemove);
+ socket.on('agentRemove', onCloseOrRemove);
+ req.onSocket(socket);
-// Unshift should *always* be something directly out of read()
-Readable.prototype.unshift = function (chunk) {
- return readableAddChunk(this, chunk, null, true, false);
+ function onFree() {
+ self.emit('free', socket, options);
+ }
+
+ function onCloseOrRemove(err) {
+ self.removeSocket(socket);
+ socket.removeListener('free', onFree);
+ socket.removeListener('close', onCloseOrRemove);
+ socket.removeListener('agentRemove', onCloseOrRemove);
+ }
+ });
};
-function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
- var state = stream._readableState;
- if (chunk === null) {
- state.reading = false;
- onEofChunk(stream, state);
- } else {
- var er;
- if (!skipChunkCheck) er = chunkInvalid(state, chunk);
- if (er) {
- stream.emit('error', er);
- } else if (state.objectMode || chunk && chunk.length > 0) {
- if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
- chunk = _uint8ArrayToBuffer(chunk);
- }
+TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
+ var self = this;
+ var placeholder = {};
+ self.sockets.push(placeholder);
- if (addToFront) {
- if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true);
- } else if (state.ended) {
- stream.emit('error', new Error('stream.push() after EOF'));
- } else {
- state.reading = false;
- if (state.decoder && !encoding) {
- chunk = state.decoder.write(chunk);
- if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
- } else {
- addChunk(stream, state, chunk, false);
- }
- }
- } else if (!addToFront) {
- state.reading = false;
+ var connectOptions = mergeOptions({}, self.proxyOptions, {
+ method: 'CONNECT',
+ path: options.host + ':' + options.port,
+ agent: false,
+ headers: {
+ host: options.host + ':' + options.port
}
+ });
+ if (options.localAddress) {
+ connectOptions.localAddress = options.localAddress;
+ }
+ if (connectOptions.proxyAuth) {
+ connectOptions.headers = connectOptions.headers || {};
+ connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
+ new Buffer(connectOptions.proxyAuth).toString('base64');
}
- return needMoreData(state);
-}
+ debug('making CONNECT request');
+ var connectReq = self.request(connectOptions);
+ connectReq.useChunkedEncodingByDefault = false; // for v0.6
+ connectReq.once('response', onResponse); // for v0.6
+ connectReq.once('upgrade', onUpgrade); // for v0.6
+ connectReq.once('connect', onConnect); // for v0.7 or later
+ connectReq.once('error', onError);
+ connectReq.end();
-function addChunk(stream, state, chunk, addToFront) {
- if (state.flowing && state.length === 0 && !state.sync) {
- stream.emit('data', chunk);
- stream.read(0);
- } else {
- // update the buffer info.
- state.length += state.objectMode ? 1 : chunk.length;
- if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
+ function onResponse(res) {
+ // Very hacky. This is necessary to avoid http-parser leaks.
+ res.upgrade = true;
+ }
- if (state.needReadable) emitReadable(stream);
+ function onUpgrade(res, socket, head) {
+ // Hacky.
+ process.nextTick(function() {
+ onConnect(res, socket, head);
+ });
}
- maybeReadMore(stream, state);
-}
-function chunkInvalid(state, chunk) {
- var er;
- if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
- er = new TypeError('Invalid non-string/buffer chunk');
+ function onConnect(res, socket, head) {
+ connectReq.removeAllListeners();
+ socket.removeAllListeners();
+
+ if (res.statusCode !== 200) {
+ debug('tunneling socket could not be established, statusCode=%d',
+ res.statusCode);
+ socket.destroy();
+ var error = new Error('tunneling socket could not be established, ' +
+ 'statusCode=' + res.statusCode);
+ error.code = 'ECONNRESET';
+ options.request.emit('error', error);
+ self.removeSocket(placeholder);
+ return;
+ }
+ if (head.length > 0) {
+ debug('got illegal response body from proxy');
+ socket.destroy();
+ var error = new Error('got illegal response body from proxy');
+ error.code = 'ECONNRESET';
+ options.request.emit('error', error);
+ self.removeSocket(placeholder);
+ return;
+ }
+ debug('tunneling connection has established');
+ self.sockets[self.sockets.indexOf(placeholder)] = socket;
+ return cb(socket);
}
- return er;
-}
-// if it's past the high water mark, we can push in some more.
-// Also, if we have no data yet, we can stand some
-// more bytes. This is to work around cases where hwm=0,
-// such as the repl. Also, if the push() triggered a
-// readable event, and the user called read(largeNumber) such that
-// needReadable was set, then we ought to push more, so that another
-// 'readable' event will be triggered.
-function needMoreData(state) {
- return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
-}
+ function onError(cause) {
+ connectReq.removeAllListeners();
-Readable.prototype.isPaused = function () {
- return this._readableState.flowing === false;
+ debug('tunneling socket could not be established, cause=%s\n',
+ cause.message, cause.stack);
+ var error = new Error('tunneling socket could not be established, ' +
+ 'cause=' + cause.message);
+ error.code = 'ECONNRESET';
+ options.request.emit('error', error);
+ self.removeSocket(placeholder);
+ }
};
-// backwards compatibility.
-Readable.prototype.setEncoding = function (enc) {
- if (!StringDecoder) StringDecoder = __webpack_require__(432).StringDecoder;
- this._readableState.decoder = new StringDecoder(enc);
- this._readableState.encoding = enc;
- return this;
+TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
+ var pos = this.sockets.indexOf(socket)
+ if (pos === -1) {
+ return;
+ }
+ this.sockets.splice(pos, 1);
+
+ var pending = this.requests.shift();
+ if (pending) {
+ // If we have pending requests and a socket gets closed a new one
+ // needs to be created to take over in the pool for the one that closed.
+ this.createSocket(pending, function(socket) {
+ pending.request.onSocket(socket);
+ });
+ }
};
-// Don't raise the hwm > 8MB
-var MAX_HWM = 0x800000;
-function computeNewHighWaterMark(n) {
- if (n >= MAX_HWM) {
- n = MAX_HWM;
- } else {
- // Get the next highest power of 2 to prevent increasing hwm excessively in
- // tiny amounts
- n--;
- n |= n >>> 1;
- n |= n >>> 2;
- n |= n >>> 4;
- n |= n >>> 8;
- n |= n >>> 16;
- n++;
+function createSecureSocket(options, cb) {
+ var self = this;
+ TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
+ var hostHeader = options.request.getHeader('host');
+ var tlsOptions = mergeOptions({}, self.options, {
+ socket: socket,
+ servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
+ });
+
+ // 0 is dummy port for v0.6
+ var secureSocket = tls.connect(0, tlsOptions);
+ self.sockets[self.sockets.indexOf(socket)] = secureSocket;
+ cb(secureSocket);
+ });
+}
+
+
+function toOptions(host, port, localAddress) {
+ if (typeof host === 'string') { // since v0.10
+ return {
+ host: host,
+ port: port,
+ localAddress: localAddress
+ };
}
- return n;
+ return host; // for v0.11 or later
}
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function howMuchToRead(n, state) {
- if (n <= 0 || state.length === 0 && state.ended) return 0;
- if (state.objectMode) return 1;
- if (n !== n) {
- // Only flow one buffer at a time
- if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
+function mergeOptions(target) {
+ for (var i = 1, len = arguments.length; i < len; ++i) {
+ var overrides = arguments[i];
+ if (typeof overrides === 'object') {
+ var keys = Object.keys(overrides);
+ for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {
+ var k = keys[j];
+ if (overrides[k] !== undefined) {
+ target[k] = overrides[k];
+ }
+ }
+ }
}
- // If we're asking for more than the current hwm, then raise the hwm.
- if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
- if (n <= state.length) return n;
- // Don't have enough
- if (!state.ended) {
- state.needReadable = true;
- return 0;
+ return target;
+}
+
+
+var debug;
+if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
+ debug = function() {
+ var args = Array.prototype.slice.call(arguments);
+ if (typeof args[0] === 'string') {
+ args[0] = 'TUNNEL: ' + args[0];
+ } else {
+ args.unshift('TUNNEL:');
+ }
+ console.error.apply(console, args);
}
- return state.length;
+} else {
+ debug = function() {};
}
+exports.debug = debug; // for test
-// you can override either this method, or the async _read(n) below.
-Readable.prototype.read = function (n) {
- debug('read', n);
- n = parseInt(n, 10);
- var state = this._readableState;
- var nOrig = n;
- if (n !== 0) state.emittedReadable = false;
+/***/ }),
+/* 142 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- // if we're doing read(0) to trigger a readable event, but we
- // already have a bunch of data in the buffer, then just trigger
- // the 'readable' event and move on.
- if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
- debug('read: emitReadable', state.length, state.ended);
- if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
- return null;
- }
+module.exports = which
+which.sync = whichSync
- n = howMuchToRead(n, state);
+var isWindows = process.platform === 'win32' ||
+ process.env.OSTYPE === 'cygwin' ||
+ process.env.OSTYPE === 'msys'
- // if we've ended, and we're now clear, then finish it up.
- if (n === 0 && state.ended) {
- if (state.length === 0) endReadable(this);
- return null;
- }
+var path = __webpack_require__(622)
+var COLON = isWindows ? ';' : ':'
+var isexe = __webpack_require__(742)
- // All the actual chunk generation logic needs to be
- // *below* the call to _read. The reason is that in certain
- // synthetic stream cases, such as passthrough streams, _read
- // may be a completely synchronous operation which may change
- // the state of the read buffer, providing enough data when
- // before there was *not* enough.
- //
- // So, the steps are:
- // 1. Figure out what the state of things will be after we do
- // a read from the buffer.
- //
- // 2. If that resulting state will trigger a _read, then call _read.
- // Note that this may be asynchronous, or synchronous. Yes, it is
- // deeply ugly to write APIs this way, but that still doesn't mean
- // that the Readable class should behave improperly, as streams are
- // designed to be sync/async agnostic.
- // Take note if the _read call is sync or async (ie, if the read call
- // has returned yet), so that we know whether or not it's safe to emit
- // 'readable' etc.
- //
- // 3. Actually pull the requested chunks out of the buffer and return.
-
- // if we need a readable event, then we need to do some reading.
- var doRead = state.needReadable;
- debug('need readable', doRead);
-
- // if we currently have less than the highWaterMark, then also read some
- if (state.length === 0 || state.length - n < state.highWaterMark) {
- doRead = true;
- debug('length less than watermark', doRead);
- }
-
- // however, if we've ended, then there's no point, and if we're already
- // reading, then it's unnecessary.
- if (state.ended || state.reading) {
- doRead = false;
- debug('reading or ended', doRead);
- } else if (doRead) {
- debug('do read');
- state.reading = true;
- state.sync = true;
- // if the length is currently zero, then we *need* a readable event.
- if (state.length === 0) state.needReadable = true;
- // call internal read method
- this._read(state.highWaterMark);
- state.sync = false;
- // If _read pushed data synchronously, then `reading` will be false,
- // and we need to re-evaluate how much data we can return to the user.
- if (!state.reading) n = howMuchToRead(nOrig, state);
- }
-
- var ret;
- if (n > 0) ret = fromList(n, state);else ret = null;
-
- if (ret === null) {
- state.needReadable = true;
- n = 0;
- } else {
- state.length -= n;
- }
-
- if (state.length === 0) {
- // If we have nothing in the buffer, then we want to know
- // as soon as we *do* get something into the buffer.
- if (!state.ended) state.needReadable = true;
-
- // If we tried to read() past the EOF, then emit end on the next tick.
- if (nOrig !== n && state.ended) endReadable(this);
- }
-
- if (ret !== null) this.emit('data', ret);
-
- return ret;
-};
-
-function onEofChunk(stream, state) {
- if (state.ended) return;
- if (state.decoder) {
- var chunk = state.decoder.end();
- if (chunk && chunk.length) {
- state.buffer.push(chunk);
- state.length += state.objectMode ? 1 : chunk.length;
- }
- }
- state.ended = true;
-
- // emit 'readable' now to make sure it gets picked up.
- emitReadable(stream);
-}
-
-// Don't emit readable right away in sync mode, because this can trigger
-// another read() call => stack overflow. This way, it might trigger
-// a nextTick recursion warning, but that's not so bad.
-function emitReadable(stream) {
- var state = stream._readableState;
- state.needReadable = false;
- if (!state.emittedReadable) {
- debug('emitReadable', state.flowing);
- state.emittedReadable = true;
- if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream);
- }
-}
+function getNotFoundError (cmd) {
+ var er = new Error('not found: ' + cmd)
+ er.code = 'ENOENT'
-function emitReadable_(stream) {
- debug('emit readable');
- stream.emit('readable');
- flow(stream);
+ return er
}
-// at this point, the user has presumably seen the 'readable' event,
-// and called read() to consume some data. that may have triggered
-// in turn another _read(n) call, in which case reading = true if
-// it's in progress.
-// However, if we're not ended, or reading, and the length < hwm,
-// then go ahead and try to read some more preemptively.
-function maybeReadMore(stream, state) {
- if (!state.readingMore) {
- state.readingMore = true;
- pna.nextTick(maybeReadMore_, stream, state);
- }
-}
+function getPathInfo (cmd, opt) {
+ var colon = opt.colon || COLON
+ var pathEnv = opt.path || process.env.PATH || ''
+ var pathExt = ['']
-function maybeReadMore_(stream, state) {
- var len = state.length;
- while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
- debug('maybeReadMore read 0');
- stream.read(0);
- if (len === state.length)
- // didn't get any data, stop spinning.
- break;else len = state.length;
- }
- state.readingMore = false;
-}
+ pathEnv = pathEnv.split(colon)
-// abstract method. to be overridden in specific implementation classes.
-// call cb(er, data) where data is <= n in length.
-// for virtual (non-string, non-buffer) streams, "length" is somewhat
-// arbitrary, and perhaps not very meaningful.
-Readable.prototype._read = function (n) {
- this.emit('error', new Error('_read() is not implemented'));
-};
+ var pathExtExe = ''
+ if (isWindows) {
+ pathEnv.unshift(process.cwd())
+ pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM')
+ pathExt = pathExtExe.split(colon)
-Readable.prototype.pipe = function (dest, pipeOpts) {
- var src = this;
- var state = this._readableState;
- switch (state.pipesCount) {
- case 0:
- state.pipes = dest;
- break;
- case 1:
- state.pipes = [state.pipes, dest];
- break;
- default:
- state.pipes.push(dest);
- break;
+ // Always test the cmd itself first. isexe will check to make sure
+ // it's found in the pathExt set.
+ if (cmd.indexOf('.') !== -1 && pathExt[0] !== '')
+ pathExt.unshift('')
}
- state.pipesCount += 1;
- debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
-
- var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
- var endFn = doEnd ? onend : unpipe;
- if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn);
+ // If it has a slash, then we don't bother searching the pathenv.
+ // just check the file itself, and that's it.
+ if (cmd.match(/\//) || isWindows && cmd.match(/\\/))
+ pathEnv = ['']
- dest.on('unpipe', onunpipe);
- function onunpipe(readable, unpipeInfo) {
- debug('onunpipe');
- if (readable === src) {
- if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
- unpipeInfo.hasUnpiped = true;
- cleanup();
- }
- }
+ return {
+ env: pathEnv,
+ ext: pathExt,
+ extExe: pathExtExe
}
+}
- function onend() {
- debug('onend');
- dest.end();
+function which (cmd, opt, cb) {
+ if (typeof opt === 'function') {
+ cb = opt
+ opt = {}
}
- // when the dest drains, it reduces the awaitDrain counter
- // on the source. This would be more elegant with a .once()
- // handler in flow(), but adding and removing repeatedly is
- // too slow.
- var ondrain = pipeOnDrain(src);
- dest.on('drain', ondrain);
-
- var cleanedUp = false;
- function cleanup() {
- debug('cleanup');
- // cleanup event handlers once the pipe is broken
- dest.removeListener('close', onclose);
- dest.removeListener('finish', onfinish);
- dest.removeListener('drain', ondrain);
- dest.removeListener('error', onerror);
- dest.removeListener('unpipe', onunpipe);
- src.removeListener('end', onend);
- src.removeListener('end', unpipe);
- src.removeListener('data', ondata);
-
- cleanedUp = true;
-
- // if the reader is waiting for a drain event from this
- // specific writer, then it would cause it to never start
- // flowing again.
- // So, if this is awaiting a drain, then we just call it now.
- // If we don't know, then assume that we are waiting for one.
- if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
- }
+ var info = getPathInfo(cmd, opt)
+ var pathEnv = info.env
+ var pathExt = info.ext
+ var pathExtExe = info.extExe
+ var found = []
- // If the user pushes more data while we're writing to dest then we'll end up
- // in ondata again. However, we only want to increase awaitDrain once because
- // dest will only emit one 'drain' event for the multiple writes.
- // => Introduce a guard on increasing awaitDrain.
- var increasedAwaitDrain = false;
- src.on('data', ondata);
- function ondata(chunk) {
- debug('ondata');
- increasedAwaitDrain = false;
- var ret = dest.write(chunk);
- if (false === ret && !increasedAwaitDrain) {
- // If the user unpiped during `dest.write()`, it is possible
- // to get stuck in a permanently paused state if that write
- // also returned false.
- // => Check whether `dest` is still a piping destination.
- if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
- debug('false write response, pause', src._readableState.awaitDrain);
- src._readableState.awaitDrain++;
- increasedAwaitDrain = true;
- }
- src.pause();
+ ;(function F (i, l) {
+ if (i === l) {
+ if (opt.all && found.length)
+ return cb(null, found)
+ else
+ return cb(getNotFoundError(cmd))
}
- }
-
- // if the dest has an error, then stop piping into it.
- // however, don't suppress the throwing behavior for this.
- function onerror(er) {
- debug('onerror', er);
- unpipe();
- dest.removeListener('error', onerror);
- if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
- }
-
- // Make sure our error handler is attached before userland ones.
- prependListener(dest, 'error', onerror);
-
- // Both close and finish should trigger unpipe, but only once.
- function onclose() {
- dest.removeListener('finish', onfinish);
- unpipe();
- }
- dest.once('close', onclose);
- function onfinish() {
- debug('onfinish');
- dest.removeListener('close', onclose);
- unpipe();
- }
- dest.once('finish', onfinish);
-
- function unpipe() {
- debug('unpipe');
- src.unpipe(dest);
- }
-
- // tell the dest that it's being piped to
- dest.emit('pipe', src);
-
- // start the flow if it hasn't been started already.
- if (!state.flowing) {
- debug('pipe resume');
- src.resume();
- }
- return dest;
-};
+ var pathPart = pathEnv[i]
+ if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
+ pathPart = pathPart.slice(1, -1)
-function pipeOnDrain(src) {
- return function () {
- var state = src._readableState;
- debug('pipeOnDrain', state.awaitDrain);
- if (state.awaitDrain) state.awaitDrain--;
- if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
- state.flowing = true;
- flow(src);
+ var p = path.join(pathPart, cmd)
+ if (!pathPart && (/^\.[\\\/]/).test(cmd)) {
+ p = cmd.slice(0, 2) + p
}
- };
+ ;(function E (ii, ll) {
+ if (ii === ll) return F(i + 1, l)
+ var ext = pathExt[ii]
+ isexe(p + ext, { pathExt: pathExtExe }, function (er, is) {
+ if (!er && is) {
+ if (opt.all)
+ found.push(p + ext)
+ else
+ return cb(null, p + ext)
+ }
+ return E(ii + 1, ll)
+ })
+ })(0, pathExt.length)
+ })(0, pathEnv.length)
}
-Readable.prototype.unpipe = function (dest) {
- var state = this._readableState;
- var unpipeInfo = { hasUnpiped: false };
-
- // if we're not piping anywhere, then do nothing.
- if (state.pipesCount === 0) return this;
-
- // just one destination. most common case.
- if (state.pipesCount === 1) {
- // passed in one, but it's not the right one.
- if (dest && dest !== state.pipes) return this;
-
- if (!dest) dest = state.pipes;
-
- // got a match.
- state.pipes = null;
- state.pipesCount = 0;
- state.flowing = false;
- if (dest) dest.emit('unpipe', this, unpipeInfo);
- return this;
- }
-
- // slow case. multiple pipe destinations.
-
- if (!dest) {
- // remove all.
- var dests = state.pipes;
- var len = state.pipesCount;
- state.pipes = null;
- state.pipesCount = 0;
- state.flowing = false;
-
- for (var i = 0; i < len; i++) {
- dests[i].emit('unpipe', this, unpipeInfo);
- }return this;
- }
-
- // try to find the right one.
- var index = indexOf(state.pipes, dest);
- if (index === -1) return this;
-
- state.pipes.splice(index, 1);
- state.pipesCount -= 1;
- if (state.pipesCount === 1) state.pipes = state.pipes[0];
-
- dest.emit('unpipe', this, unpipeInfo);
+function whichSync (cmd, opt) {
+ opt = opt || {}
- return this;
-};
+ var info = getPathInfo(cmd, opt)
+ var pathEnv = info.env
+ var pathExt = info.ext
+ var pathExtExe = info.extExe
+ var found = []
-// set up data events if they are asked for
-// Ensure readable listeners eventually get something
-Readable.prototype.on = function (ev, fn) {
- var res = Stream.prototype.on.call(this, ev, fn);
+ for (var i = 0, l = pathEnv.length; i < l; i ++) {
+ var pathPart = pathEnv[i]
+ if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"')
+ pathPart = pathPart.slice(1, -1)
- if (ev === 'data') {
- // Start flowing on next tick if stream isn't explicitly paused
- if (this._readableState.flowing !== false) this.resume();
- } else if (ev === 'readable') {
- var state = this._readableState;
- if (!state.endEmitted && !state.readableListening) {
- state.readableListening = state.needReadable = true;
- state.emittedReadable = false;
- if (!state.reading) {
- pna.nextTick(nReadingNextTick, this);
- } else if (state.length) {
- emitReadable(this);
- }
+ var p = path.join(pathPart, cmd)
+ if (!pathPart && /^\.[\\\/]/.test(cmd)) {
+ p = cmd.slice(0, 2) + p
+ }
+ for (var j = 0, ll = pathExt.length; j < ll; j ++) {
+ var cur = p + pathExt[j]
+ var is
+ try {
+ is = isexe.sync(cur, { pathExt: pathExtExe })
+ if (is) {
+ if (opt.all)
+ found.push(cur)
+ else
+ return cur
+ }
+ } catch (ex) {}
}
}
- return res;
-};
-Readable.prototype.addListener = Readable.prototype.on;
-
-function nReadingNextTick(self) {
- debug('readable nexttick read 0');
- self.read(0);
-}
-
-// pause() and resume() are remnants of the legacy readable stream API
-// If the user uses them, then switch into old mode.
-Readable.prototype.resume = function () {
- var state = this._readableState;
- if (!state.flowing) {
- debug('resume');
- state.flowing = true;
- resume(this, state);
- }
- return this;
-};
-
-function resume(stream, state) {
- if (!state.resumeScheduled) {
- state.resumeScheduled = true;
- pna.nextTick(resume_, stream, state);
- }
-}
-
-function resume_(stream, state) {
- if (!state.reading) {
- debug('resume read 0');
- stream.read(0);
- }
-
- state.resumeScheduled = false;
- state.awaitDrain = 0;
- stream.emit('resume');
- flow(stream);
- if (state.flowing && !state.reading) stream.read(0);
-}
+ if (opt.all && found.length)
+ return found
-Readable.prototype.pause = function () {
- debug('call pause flowing=%j', this._readableState.flowing);
- if (false !== this._readableState.flowing) {
- debug('pause');
- this._readableState.flowing = false;
- this.emit('pause');
- }
- return this;
-};
+ if (opt.nothrow)
+ return null
-function flow(stream) {
- var state = stream._readableState;
- debug('flow', state.flowing);
- while (state.flowing && stream.read() !== null) {}
+ throw getNotFoundError(cmd)
}
-// wrap an old-style stream as the async data source.
-// This is *not* part of the readable stream interface.
-// It is an ugly unfortunate mess of history.
-Readable.prototype.wrap = function (stream) {
- var _this = this;
-
- var state = this._readableState;
- var paused = false;
- stream.on('end', function () {
- debug('wrapped end');
- if (state.decoder && !state.ended) {
- var chunk = state.decoder.end();
- if (chunk && chunk.length) _this.push(chunk);
- }
+/***/ }),
+/* 143 */
+/***/ (function(module) {
- _this.push(null);
- });
+// parse out just the options we care about so we always get a consistent
+// obj with keys in a consistent order.
+const opts = ['includePrerelease', 'loose', 'rtl']
+const parseOptions = options =>
+ !options ? {}
+ : typeof options !== 'object' ? { loose: true }
+ : opts.filter(k => options[k]).reduce((options, k) => {
+ options[k] = true
+ return options
+ }, {})
+module.exports = parseOptions
- stream.on('data', function (chunk) {
- debug('wrapped data');
- if (state.decoder) chunk = state.decoder.write(chunk);
- // don't skip over falsy values in objectMode
- if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
+/***/ }),
+/* 144 */
+/***/ (function(module) {
- var ret = _this.push(chunk);
- if (!ret) {
- paused = true;
- stream.pause();
- }
- });
-
- // proxy all the other methods.
- // important when wrapping filters and duplexes.
- for (var i in stream) {
- if (this[i] === undefined && typeof stream[i] === 'function') {
- this[i] = function (method) {
- return function () {
- return stream[method].apply(stream, arguments);
- };
- }(i);
- }
- }
-
- // proxy certain important events.
- for (var n = 0; n < kProxyEvents.length; n++) {
- stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
- }
-
- // when we try to consume some more bytes, simply unpause the
- // underlying stream.
- this._read = function (n) {
- debug('wrapped _read', n);
- if (paused) {
- paused = false;
- stream.resume();
- }
- };
-
- return this;
-};
+/*! *****************************************************************************
+Copyright (c) Microsoft Corporation.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+***************************************************************************** */
+/* global global, define, System, Reflect, Promise */
+var __extends;
+var __assign;
+var __rest;
+var __decorate;
+var __param;
+var __metadata;
+var __awaiter;
+var __generator;
+var __exportStar;
+var __values;
+var __read;
+var __spread;
+var __spreadArrays;
+var __await;
+var __asyncGenerator;
+var __asyncDelegator;
+var __asyncValues;
+var __makeTemplateObject;
+var __importStar;
+var __importDefault;
+var __classPrivateFieldGet;
+var __classPrivateFieldSet;
+var __createBinding;
+(function (factory) {
+ var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {};
+ if (typeof define === "function" && define.amd) {
+ define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); });
+ }
+ else if ( true && typeof module.exports === "object") {
+ factory(createExporter(root, createExporter(module.exports)));
+ }
+ else {
+ factory(createExporter(root));
+ }
+ function createExporter(exports, previous) {
+ if (exports !== root) {
+ if (typeof Object.create === "function") {
+ Object.defineProperty(exports, "__esModule", { value: true });
+ }
+ else {
+ exports.__esModule = true;
+ }
+ }
+ return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };
+ }
+})
+(function (exporter) {
+ var extendStatics = Object.setPrototypeOf ||
+ ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
+ function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
+
+ __extends = function (d, b) {
+ extendStatics(d, b);
+ function __() { this.constructor = d; }
+ d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+ };
+
+ __assign = Object.assign || function (t) {
+ for (var s, i = 1, n = arguments.length; i < n; i++) {
+ s = arguments[i];
+ for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
+ }
+ return t;
+ };
+
+ __rest = function (s, e) {
+ var t = {};
+ for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
+ t[p] = s[p];
+ if (s != null && typeof Object.getOwnPropertySymbols === "function")
+ for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
+ if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
+ t[p[i]] = s[p[i]];
+ }
+ return t;
+ };
+
+ __decorate = function (decorators, target, key, desc) {
+ var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
+ if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
+ else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
+ return c > 3 && r && Object.defineProperty(target, key, r), r;
+ };
+
+ __param = function (paramIndex, decorator) {
+ return function (target, key) { decorator(target, key, paramIndex); }
+ };
+
+ __metadata = function (metadataKey, metadataValue) {
+ if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue);
+ };
+
+ __awaiter = function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+ };
+
+ __generator = function (thisArg, body) {
+ var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
+ return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
+ function verb(n) { return function (v) { return step([n, v]); }; }
+ function step(op) {
+ if (f) throw new TypeError("Generator is already executing.");
+ while (_) try {
+ if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
+ if (y = 0, t) op = [op[0] & 2, t.value];
+ switch (op[0]) {
+ case 0: case 1: t = op; break;
+ case 4: _.label++; return { value: op[1], done: false };
+ case 5: _.label++; y = op[1]; op = [0]; continue;
+ case 7: op = _.ops.pop(); _.trys.pop(); continue;
+ default:
+ if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
+ if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
+ if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
+ if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
+ if (t[2]) _.ops.pop();
+ _.trys.pop(); continue;
+ }
+ op = body.call(thisArg, _);
+ } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
+ if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
+ }
+ };
+
+ __exportStar = function(m, o) {
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);
+ };
+
+ __createBinding = Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+ }) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ });
+
+ __values = function (o) {
+ var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
+ if (m) return m.call(o);
+ if (o && typeof o.length === "number") return {
+ next: function () {
+ if (o && i >= o.length) o = void 0;
+ return { value: o && o[i++], done: !o };
+ }
+ };
+ throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
+ };
+
+ __read = function (o, n) {
+ var m = typeof Symbol === "function" && o[Symbol.iterator];
+ if (!m) return o;
+ var i = m.call(o), r, ar = [], e;
+ try {
+ while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
+ }
+ catch (error) { e = { error: error }; }
+ finally {
+ try {
+ if (r && !r.done && (m = i["return"])) m.call(i);
+ }
+ finally { if (e) throw e.error; }
+ }
+ return ar;
+ };
+
+ __spread = function () {
+ for (var ar = [], i = 0; i < arguments.length; i++)
+ ar = ar.concat(__read(arguments[i]));
+ return ar;
+ };
+
+ __spreadArrays = function () {
+ for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;
+ for (var r = Array(s), k = 0, i = 0; i < il; i++)
+ for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)
+ r[k] = a[j];
+ return r;
+ };
+
+ __await = function (v) {
+ return this instanceof __await ? (this.v = v, this) : new __await(v);
+ };
+
+ __asyncGenerator = function (thisArg, _arguments, generator) {
+ if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+ var g = generator.apply(thisArg, _arguments || []), i, q = [];
+ return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
+ function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
+ function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
+ function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
+ function fulfill(value) { resume("next", value); }
+ function reject(value) { resume("throw", value); }
+ function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
+ };
+
+ __asyncDelegator = function (o) {
+ var i, p;
+ return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i;
+ function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; }
+ };
+
+ __asyncValues = function (o) {
+ if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+ var m = o[Symbol.asyncIterator], i;
+ return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+ function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+ function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+ };
+
+ __makeTemplateObject = function (cooked, raw) {
+ if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; }
+ return cooked;
+ };
+
+ var __setModuleDefault = Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+ o["default"] = v;
+ };
+
+ __importStar = function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+
+ __importDefault = function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+
+ __classPrivateFieldGet = function (receiver, privateMap) {
+ if (!privateMap.has(receiver)) {
+ throw new TypeError("attempted to get private field on non-instance");
+ }
+ return privateMap.get(receiver);
+ };
+
+ __classPrivateFieldSet = function (receiver, privateMap, value) {
+ if (!privateMap.has(receiver)) {
+ throw new TypeError("attempted to set private field on non-instance");
+ }
+ privateMap.set(receiver, value);
+ return value;
+ };
+
+ exporter("__extends", __extends);
+ exporter("__assign", __assign);
+ exporter("__rest", __rest);
+ exporter("__decorate", __decorate);
+ exporter("__param", __param);
+ exporter("__metadata", __metadata);
+ exporter("__awaiter", __awaiter);
+ exporter("__generator", __generator);
+ exporter("__exportStar", __exportStar);
+ exporter("__createBinding", __createBinding);
+ exporter("__values", __values);
+ exporter("__read", __read);
+ exporter("__spread", __spread);
+ exporter("__spreadArrays", __spreadArrays);
+ exporter("__await", __await);
+ exporter("__asyncGenerator", __asyncGenerator);
+ exporter("__asyncDelegator", __asyncDelegator);
+ exporter("__asyncValues", __asyncValues);
+ exporter("__makeTemplateObject", __makeTemplateObject);
+ exporter("__importStar", __importStar);
+ exporter("__importDefault", __importDefault);
+ exporter("__classPrivateFieldGet", __classPrivateFieldGet);
+ exporter("__classPrivateFieldSet", __classPrivateFieldSet);
+});
-Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
- // making it explicit this property is not enumerable
- // because otherwise some prototype manipulation in
- // userland will fail
- enumerable: false,
- get: function () {
- return this._readableState.highWaterMark;
- }
-});
-// exposed for testing purposes only.
-Readable._fromList = fromList;
+/***/ }),
+/* 145 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-// Pluck off n bytes from an array of buffers.
-// Length is the combined lengths of all the buffers in the list.
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function fromList(n, state) {
- // nothing buffered
- if (state.length === 0) return null;
+"use strict";
- var ret;
- if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
- // read it all, truncate the list
- if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);
- state.buffer.clear();
- } else {
- // read part of list
- ret = fromListPartial(n, state.buffer, state.decoder);
- }
+const pump = __webpack_require__(284);
+const bufferStream = __webpack_require__(927);
- return ret;
+class MaxBufferError extends Error {
+ constructor() {
+ super('maxBuffer exceeded');
+ this.name = 'MaxBufferError';
+ }
}
-// Extracts only enough buffered data to satisfy the amount requested.
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function fromListPartial(n, list, hasStrings) {
- var ret;
- if (n < list.head.data.length) {
- // slice is the same for buffers and strings
- ret = list.head.data.slice(0, n);
- list.head.data = list.head.data.slice(n);
- } else if (n === list.head.data.length) {
- // first chunk is a perfect match
- ret = list.shift();
- } else {
- // result spans more than one buffer
- ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);
- }
- return ret;
-}
+function getStream(inputStream, options) {
+ if (!inputStream) {
+ return Promise.reject(new Error('Expected a stream'));
+ }
-// Copies a specified amount of characters from the list of buffered data
-// chunks.
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function copyFromBufferString(n, list) {
- var p = list.head;
- var c = 1;
- var ret = p.data;
- n -= ret.length;
- while (p = p.next) {
- var str = p.data;
- var nb = n > str.length ? str.length : n;
- if (nb === str.length) ret += str;else ret += str.slice(0, n);
- n -= nb;
- if (n === 0) {
- if (nb === str.length) {
- ++c;
- if (p.next) list.head = p.next;else list.head = list.tail = null;
- } else {
- list.head = p;
- p.data = str.slice(nb);
- }
- break;
- }
- ++c;
- }
- list.length -= c;
- return ret;
-}
+ options = Object.assign({maxBuffer: Infinity}, options);
-// Copies a specified amount of bytes from the list of buffered data chunks.
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function copyFromBuffer(n, list) {
- var ret = Buffer.allocUnsafe(n);
- var p = list.head;
- var c = 1;
- p.data.copy(ret);
- n -= p.data.length;
- while (p = p.next) {
- var buf = p.data;
- var nb = n > buf.length ? buf.length : n;
- buf.copy(ret, ret.length - n, 0, nb);
- n -= nb;
- if (n === 0) {
- if (nb === buf.length) {
- ++c;
- if (p.next) list.head = p.next;else list.head = list.tail = null;
- } else {
- list.head = p;
- p.data = buf.slice(nb);
- }
- break;
- }
- ++c;
- }
- list.length -= c;
- return ret;
-}
+ const {maxBuffer} = options;
-function endReadable(stream) {
- var state = stream._readableState;
+ let stream;
+ return new Promise((resolve, reject) => {
+ const rejectPromise = error => {
+ if (error) { // A null check
+ error.bufferedData = stream.getBufferedValue();
+ }
+ reject(error);
+ };
- // If we get here before consuming all the bytes, then that is a
- // bug in node. Should never happen.
- if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream');
+ stream = pump(inputStream, bufferStream(options), error => {
+ if (error) {
+ rejectPromise(error);
+ return;
+ }
- if (!state.endEmitted) {
- state.ended = true;
- pna.nextTick(endReadableNT, state, stream);
- }
-}
+ resolve();
+ });
-function endReadableNT(state, stream) {
- // Check that we didn't get one last unshift.
- if (!state.endEmitted && state.length === 0) {
- state.endEmitted = true;
- stream.readable = false;
- stream.emit('end');
- }
+ stream.on('data', () => {
+ if (stream.getBufferedLength() > maxBuffer) {
+ rejectPromise(new MaxBufferError());
+ }
+ });
+ }).then(() => stream.getBufferedValue());
}
-function indexOf(xs, x) {
- for (var i = 0, l = xs.length; i < l; i++) {
- if (xs[i] === x) return i;
- }
- return -1;
-}
+module.exports = getStream;
+module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'}));
+module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true}));
+module.exports.MaxBufferError = MaxBufferError;
+
/***/ }),
-/* 227 */,
-/* 228 */,
-/* 229 */
-/***/ (function(__unusedmodule, exports) {
+/* 146 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
+/**
+ * refer:
+ * * @atimb "Real keep-alive HTTP agent": https://gist.github.com/2963672
+ * * https://github.com/joyent/node/blob/master/lib/http.js
+ * * https://github.com/joyent/node/blob/master/lib/https.js
+ * * https://github.com/joyent/node/blob/master/lib/_http_agent.js
+ */
-Object.defineProperty(exports, '__esModule', { value: true });
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-/**
- * A static-key-based credential that supports updating
- * the underlying key value.
- */
-var AzureKeyCredential = /** @class */ (function () {
- /**
- * Create an instance of an AzureKeyCredential for use
- * with a service client.
- *
- * @param key the initial value of the key to use in authentication
- */
- function AzureKeyCredential(key) {
- if (!key) {
- throw new Error("key must be a non-empty string");
- }
- this._key = key;
- }
- Object.defineProperty(AzureKeyCredential.prototype, "key", {
- /**
- * The value of the key to be used in authentication
- */
- get: function () {
- return this._key;
- },
- enumerable: false,
- configurable: true
- });
- /**
- * Change the value of the key.
- *
- * Updates will take effect upon the next request after
- * updating the key value.
- *
- * @param newKey the new key value to be used
- */
- AzureKeyCredential.prototype.update = function (newKey) {
- this._key = newKey;
- };
- return AzureKeyCredential;
-}());
-
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-/**
- * Tests an object to determine whether it implements TokenCredential.
- *
- * @param credential The assumed TokenCredential to be tested.
- */
-function isTokenCredential(credential) {
- // Check for an object with a 'getToken' function and possibly with
- // a 'signRequest' function. We do this check to make sure that
- // a ServiceClientCredentials implementor (like TokenClientCredentials
- // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if
- // it doesn't actually implement TokenCredential also.
- return (credential &&
- typeof credential.getToken === "function" &&
- (credential.signRequest === undefined || credential.getToken.length > 0));
-}
-
-exports.AzureKeyCredential = AzureKeyCredential;
-exports.isTokenCredential = isTokenCredential;
-//# sourceMappingURL=index.js.map
-
-
-/***/ }),
-/* 230 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+const OriginalAgent = __webpack_require__(998).Agent;
+const ms = __webpack_require__(337);
-"use strict";
+class Agent extends OriginalAgent {
+ constructor(options) {
+ options = options || {};
+ options.keepAlive = options.keepAlive !== false;
+ // default is keep-alive and 15s free socket timeout
+ if (options.freeSocketKeepAliveTimeout === undefined) {
+ options.freeSocketKeepAliveTimeout = 15000;
+ }
+ // Legacy API: keepAliveTimeout should be rename to `freeSocketKeepAliveTimeout`
+ if (options.keepAliveTimeout) {
+ options.freeSocketKeepAliveTimeout = options.keepAliveTimeout;
+ }
+ options.freeSocketKeepAliveTimeout = ms(options.freeSocketKeepAliveTimeout);
+ // Sets the socket to timeout after timeout milliseconds of inactivity on the socket.
+ // By default is double free socket keepalive timeout.
+ if (options.timeout === undefined) {
+ options.timeout = options.freeSocketKeepAliveTimeout * 2;
+ // make sure socket default inactivity timeout >= 30s
+ if (options.timeout < 30000) {
+ options.timeout = 30000;
+ }
+ }
+ options.timeout = ms(options.timeout);
-module.exports = __webpack_require__(412)
+ super(options);
+ this.createSocketCount = 0;
+ this.createSocketCountLastCheck = 0;
-/***/ }),
-/* 231 */,
-/* 232 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ this.createSocketErrorCount = 0;
+ this.createSocketErrorCountLastCheck = 0;
-"use strict";
+ this.closeSocketCount = 0;
+ this.closeSocketCountLastCheck = 0;
+ // socket error event count
+ this.errorSocketCount = 0;
+ this.errorSocketCountLastCheck = 0;
-/**/
+ this.requestCount = 0;
+ this.requestCountLastCheck = 0;
-var pna = __webpack_require__(822);
-/**/
+ this.timeoutSocketCount = 0;
+ this.timeoutSocketCountLastCheck = 0;
-// undocumented cb() API, needed for core, not for public API
-function destroy(err, cb) {
- var _this = this;
+ this.on('free', s => {
+ this.requestCount++;
+ // last enter free queue timestamp
+ s.lastFreeTime = Date.now();
+ });
+ this.on('timeout', () => {
+ this.timeoutSocketCount++;
+ });
+ this.on('close', () => {
+ this.closeSocketCount++;
+ });
+ this.on('error', () => {
+ this.errorSocketCount++;
+ });
+ }
- var readableDestroyed = this._readableState && this._readableState.destroyed;
- var writableDestroyed = this._writableState && this._writableState.destroyed;
+ createSocket(req, options, cb) {
+ super.createSocket(req, options, (err, socket) => {
+ if (err) {
+ this.createSocketErrorCount++;
+ return cb(err);
+ }
+ if (this.keepAlive) {
+ // Disable Nagle's algorithm: http://blog.caustik.com/2012/04/08/scaling-node-js-to-100k-concurrent-connections/
+ // https://fengmk2.com/benchmark/nagle-algorithm-delayed-ack-mock.html
+ socket.setNoDelay(true);
+ }
+ this.createSocketCount++;
+ cb(null, socket);
+ });
+ }
- if (readableDestroyed || writableDestroyed) {
- if (cb) {
- cb(err);
- } else if (err && (!this._writableState || !this._writableState.errorEmitted)) {
- pna.nextTick(emitErrorNT, this, err);
+ get statusChanged() {
+ const changed = this.createSocketCount !== this.createSocketCountLastCheck ||
+ this.createSocketErrorCount !== this.createSocketErrorCountLastCheck ||
+ this.closeSocketCount !== this.closeSocketCountLastCheck ||
+ this.errorSocketCount !== this.errorSocketCountLastCheck ||
+ this.timeoutSocketCount !== this.timeoutSocketCountLastCheck ||
+ this.requestCount !== this.requestCountLastCheck;
+ if (changed) {
+ this.createSocketCountLastCheck = this.createSocketCount;
+ this.createSocketErrorCountLastCheck = this.createSocketErrorCount;
+ this.closeSocketCountLastCheck = this.closeSocketCount;
+ this.errorSocketCountLastCheck = this.errorSocketCount;
+ this.timeoutSocketCountLastCheck = this.timeoutSocketCount;
+ this.requestCountLastCheck = this.requestCount;
}
- return this;
+ return changed;
}
- // we set destroyed to true before firing error callbacks in order
- // to make it re-entrance safe in case destroy() is called within callbacks
-
- if (this._readableState) {
- this._readableState.destroyed = true;
+ getCurrentStatus() {
+ return {
+ createSocketCount: this.createSocketCount,
+ createSocketErrorCount: this.createSocketErrorCount,
+ closeSocketCount: this.closeSocketCount,
+ errorSocketCount: this.errorSocketCount,
+ timeoutSocketCount: this.timeoutSocketCount,
+ requestCount: this.requestCount,
+ freeSockets: inspect(this.freeSockets),
+ sockets: inspect(this.sockets),
+ requests: inspect(this.requests),
+ };
}
+}
- // if this is a duplex stream mark the writable part as destroyed as well
- if (this._writableState) {
- this._writableState.destroyed = true;
+module.exports = Agent;
+
+function inspect(obj) {
+ const res = {};
+ for (const key in obj) {
+ res[key] = obj[key].length;
}
+ return res;
+}
- this._destroy(err || null, function (err) {
- if (!cb && err) {
- pna.nextTick(emitErrorNT, _this, err);
- if (_this._writableState) {
- _this._writableState.errorEmitted = true;
- }
- } else if (cb) {
- cb(err);
- }
- });
- return this;
-}
+/***/ }),
+/* 147 */
+/***/ (function(__unusedmodule, exports) {
-function undestroy() {
- if (this._readableState) {
- this._readableState.destroyed = false;
- this._readableState.reading = false;
- this._readableState.ended = false;
- this._readableState.endEmitted = false;
- }
+"use strict";
- if (this._writableState) {
- this._writableState.destroyed = false;
- this._writableState.ended = false;
- this._writableState.ending = false;
- this._writableState.finished = false;
- this._writableState.errorEmitted = false;
- }
+
+exports.fromCallback = function (fn) {
+ return Object.defineProperty(function () {
+ if (typeof arguments[arguments.length - 1] === 'function') fn.apply(this, arguments)
+ else {
+ return new Promise((resolve, reject) => {
+ arguments[arguments.length] = (err, res) => {
+ if (err) return reject(err)
+ resolve(res)
+ }
+ arguments.length++
+ fn.apply(this, arguments)
+ })
+ }
+ }, 'name', { value: fn.name })
}
-function emitErrorNT(self, err) {
- self.emit('error', err);
+exports.fromPromise = function (fn) {
+ return Object.defineProperty(function () {
+ const cb = arguments[arguments.length - 1]
+ if (typeof cb !== 'function') return fn.apply(this, arguments)
+ else fn.apply(this, arguments).then(r => cb(null, r), cb)
+ }, 'name', { value: fn.name })
}
-module.exports = {
- destroy: destroy,
- undestroy: undestroy
-};
/***/ }),
-/* 233 */
-/***/ (function(module, exports, __webpack_require__) {
+/* 148 */,
+/* 149 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-/* eslint-disable node/no-deprecated-api */
-var buffer = __webpack_require__(293)
-var Buffer = buffer.Buffer
+"use strict";
-// alternative to using Object.keys for old browsers
-function copyProps (src, dst) {
- for (var key in src) {
- dst[key] = src[key]
- }
-}
-if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
- module.exports = buffer
-} else {
- // Copy properties from require('buffer')
- copyProps(buffer, exports)
- exports.Buffer = SafeBuffer
-}
+module.exports =
+ function(Promise, PromiseArray, debug) {
+var PromiseInspection = Promise.PromiseInspection;
+var util = __webpack_require__(248);
-function SafeBuffer (arg, encodingOrOffset, length) {
- return Buffer(arg, encodingOrOffset, length)
+function SettledPromiseArray(values) {
+ this.constructor$(values);
}
+util.inherits(SettledPromiseArray, PromiseArray);
-// Copy static methods from Buffer
-copyProps(Buffer, SafeBuffer)
+SettledPromiseArray.prototype._promiseResolved = function (index, inspection) {
+ this._values[index] = inspection;
+ var totalResolved = ++this._totalResolved;
+ if (totalResolved >= this._length) {
+ this._resolve(this._values);
+ return true;
+ }
+ return false;
+};
-SafeBuffer.from = function (arg, encodingOrOffset, length) {
- if (typeof arg === 'number') {
- throw new TypeError('Argument must not be a number')
- }
- return Buffer(arg, encodingOrOffset, length)
-}
+SettledPromiseArray.prototype._promiseFulfilled = function (value, index) {
+ var ret = new PromiseInspection();
+ ret._bitField = 33554432;
+ ret._settledValueField = value;
+ return this._promiseResolved(index, ret);
+};
+SettledPromiseArray.prototype._promiseRejected = function (reason, index) {
+ var ret = new PromiseInspection();
+ ret._bitField = 16777216;
+ ret._settledValueField = reason;
+ return this._promiseResolved(index, ret);
+};
-SafeBuffer.alloc = function (size, fill, encoding) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- var buf = Buffer(size)
- if (fill !== undefined) {
- if (typeof encoding === 'string') {
- buf.fill(fill, encoding)
- } else {
- buf.fill(fill)
- }
- } else {
- buf.fill(0)
- }
- return buf
-}
+Promise.settle = function (promises) {
+ debug.deprecated(".settle()", ".reflect()");
+ return new SettledPromiseArray(promises).promise();
+};
-SafeBuffer.allocUnsafe = function (size) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- return Buffer(size)
-}
+Promise.allSettled = function (promises) {
+ return new SettledPromiseArray(promises).promise();
+};
-SafeBuffer.allocUnsafeSlow = function (size) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- return buffer.SlowBuffer(size)
-}
+Promise.prototype.settle = function () {
+ return Promise.settle(this);
+};
+};
/***/ }),
-/* 234 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+/* 150 */,
+/* 151 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
-__webpack_require__(812);
-const inherits = __webpack_require__(669).inherits;
-const promisify = __webpack_require__(662);
-const EventEmitter = __webpack_require__(614).EventEmitter;
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.NOOP_TRACER = exports.NoopTracer = void 0;
+var NoopSpan_1 = __webpack_require__(767);
+/**
+ * No-op implementations of {@link Tracer}.
+ */
+var NoopTracer = /** @class */ (function () {
+ function NoopTracer() {
+ }
+ NoopTracer.prototype.getCurrentSpan = function () {
+ return NoopSpan_1.NOOP_SPAN;
+ };
+ // startSpan starts a noop span.
+ NoopTracer.prototype.startSpan = function (name, options) {
+ return NoopSpan_1.NOOP_SPAN;
+ };
+ NoopTracer.prototype.withSpan = function (span, fn) {
+ return fn();
+ };
+ NoopTracer.prototype.bind = function (target, span) {
+ return target;
+ };
+ return NoopTracer;
+}());
+exports.NoopTracer = NoopTracer;
+exports.NOOP_TRACER = new NoopTracer();
+//# sourceMappingURL=NoopTracer.js.map
-module.exports = Agent;
+/***/ }),
+/* 152 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-function isAgent(v) {
- return v && typeof v.addRequest === 'function';
+var Stream = __webpack_require__(794).Stream;
+var util = __webpack_require__(669);
+
+module.exports = DelayedStream;
+function DelayedStream() {
+ this.source = null;
+ this.dataSize = 0;
+ this.maxDataSize = 1024 * 1024;
+ this.pauseStream = true;
+
+ this._maxDataSizeExceeded = false;
+ this._released = false;
+ this._bufferedEvents = [];
}
+util.inherits(DelayedStream, Stream);
-/**
- * Base `http.Agent` implementation.
- * No pooling/keep-alive is implemented by default.
- *
- * @param {Function} callback
- * @api public
- */
-function Agent(callback, _opts) {
- if (!(this instanceof Agent)) {
- return new Agent(callback, _opts);
+DelayedStream.create = function(source, options) {
+ var delayedStream = new this();
+
+ options = options || {};
+ for (var option in options) {
+ delayedStream[option] = options[option];
}
- EventEmitter.call(this);
+ delayedStream.source = source;
- // The callback gets promisified if it has 3 parameters
- // (i.e. it has a callback function) lazily
- this._promisifiedCallback = false;
+ var realEmit = source.emit;
+ source.emit = function() {
+ delayedStream._handleEmit(arguments);
+ return realEmit.apply(source, arguments);
+ };
- let opts = _opts;
- if ('function' === typeof callback) {
- this.callback = callback;
- } else if (callback) {
- opts = callback;
+ source.on('error', function() {});
+ if (delayedStream.pauseStream) {
+ source.pause();
}
- // timeout for the socket to be returned from the callback
- this.timeout = (opts && opts.timeout) || null;
+ return delayedStream;
+};
- this.options = opts;
-}
-inherits(Agent, EventEmitter);
+Object.defineProperty(DelayedStream.prototype, 'readable', {
+ configurable: true,
+ enumerable: true,
+ get: function() {
+ return this.source.readable;
+ }
+});
-/**
- * Override this function in your subclass!
- */
-Agent.prototype.callback = function callback(req, opts) {
- throw new Error(
- '"agent-base" has no default implementation, you must subclass and override `callback()`'
- );
+DelayedStream.prototype.setEncoding = function() {
+ return this.source.setEncoding.apply(this.source, arguments);
};
-/**
- * Called by node-core's "_http_client.js" module when creating
- * a new HTTP request with this Agent instance.
- *
- * @api public
- */
-Agent.prototype.addRequest = function addRequest(req, _opts) {
- const ownOpts = Object.assign({}, _opts);
-
- // Set default `host` for HTTP to localhost
- if (null == ownOpts.host) {
- ownOpts.host = 'localhost';
+DelayedStream.prototype.resume = function() {
+ if (!this._released) {
+ this.release();
}
- // Set default `port` for HTTP if none was explicitly specified
- if (null == ownOpts.port) {
- ownOpts.port = ownOpts.secureEndpoint ? 443 : 80;
- }
+ this.source.resume();
+};
- const opts = Object.assign({}, this.options, ownOpts);
+DelayedStream.prototype.pause = function() {
+ this.source.pause();
+};
- if (opts.host && opts.path) {
- // If both a `host` and `path` are specified then it's most likely the
- // result of a `url.parse()` call... we need to remove the `path` portion so
- // that `net.connect()` doesn't attempt to open that as a unix socket file.
- delete opts.path;
- }
-
- delete opts.agent;
- delete opts.hostname;
- delete opts._defaultAgent;
- delete opts.defaultPort;
- delete opts.createConnection;
-
- // Hint to use "Connection: close"
- // XXX: non-documented `http` module API :(
- req._last = true;
- req.shouldKeepAlive = false;
+DelayedStream.prototype.release = function() {
+ this._released = true;
- // Create the `stream.Duplex` instance
- let timeout;
- let timedOut = false;
- const timeoutMs = this.timeout;
- const freeSocket = this.freeSocket;
+ this._bufferedEvents.forEach(function(args) {
+ this.emit.apply(this, args);
+ }.bind(this));
+ this._bufferedEvents = [];
+};
- function onerror(err) {
- if (req._hadError) return;
- req.emit('error', err);
- // For Safety. Some additional errors might fire later on
- // and we need to make sure we don't double-fire the error event.
- req._hadError = true;
- }
+DelayedStream.prototype.pipe = function() {
+ var r = Stream.prototype.pipe.apply(this, arguments);
+ this.resume();
+ return r;
+};
- function ontimeout() {
- timeout = null;
- timedOut = true;
- const err = new Error(
- 'A "socket" was not created for HTTP request before ' + timeoutMs + 'ms'
- );
- err.code = 'ETIMEOUT';
- onerror(err);
+DelayedStream.prototype._handleEmit = function(args) {
+ if (this._released) {
+ this.emit.apply(this, args);
+ return;
}
- function callbackError(err) {
- if (timedOut) return;
- if (timeout != null) {
- clearTimeout(timeout);
- timeout = null;
- }
- onerror(err);
+ if (args[0] === 'data') {
+ this.dataSize += args[1].length;
+ this._checkIfMaxDataSizeExceeded();
}
- function onsocket(socket) {
- if (timedOut) return;
- if (timeout != null) {
- clearTimeout(timeout);
- timeout = null;
- }
- if (isAgent(socket)) {
- // `socket` is actually an http.Agent instance, so relinquish
- // responsibility for this `req` to the Agent from here on
- socket.addRequest(req, opts);
- } else if (socket) {
- function onfree() {
- freeSocket(socket, opts);
- }
- socket.on('free', onfree);
- req.onSocket(socket);
- } else {
- const err = new Error(
- 'no Duplex stream was returned to agent-base for `' + req.method + ' ' + req.path + '`'
- );
- onerror(err);
- }
- }
+ this._bufferedEvents.push(args);
+};
- if (!this._promisifiedCallback && this.callback.length >= 3) {
- // Legacy callback function - convert to a Promise
- this.callback = promisify(this.callback, this);
- this._promisifiedCallback = true;
+DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() {
+ if (this._maxDataSizeExceeded) {
+ return;
}
- if (timeoutMs > 0) {
- timeout = setTimeout(ontimeout, timeoutMs);
+ if (this.dataSize <= this.maxDataSize) {
+ return;
}
- try {
- Promise.resolve(this.callback(req, opts)).then(onsocket, callbackError);
- } catch (err) {
- Promise.reject(err).catch(callbackError);
- }
+ this._maxDataSizeExceeded = true;
+ var message =
+ 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'
+ this.emit('error', new Error(message));
};
-Agent.prototype.freeSocket = function freeSocket(socket, opts) {
- // TODO reuse sockets
- socket.destroy();
+
+/***/ }),
+/* 153 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var core = __webpack_require__(391);
+
+module.exports = function isCore(x) {
+ return Object.prototype.hasOwnProperty.call(core, x);
};
/***/ }),
-/* 235 */
+/* 154 */
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-var util = __webpack_require__(669)
-var stream = __webpack_require__(914)
-var delegate = __webpack_require__(967)
-var Tracker = __webpack_require__(623)
-var TrackerStream = module.exports = function (name, size, options) {
- stream.Transform.call(this, options)
- this.tracker = new Tracker(name, size)
- this.name = name
- this.id = this.tracker.id
- this.tracker.on('change', delegateChange(this))
-}
-util.inherits(TrackerStream, stream.Transform)
+const figgyPudding = __webpack_require__(965)
+const index = __webpack_require__(407)
+const memo = __webpack_require__(521)
+const write = __webpack_require__(186)
+const to = __webpack_require__(371).to
-function delegateChange (trackerStream) {
- return function (name, completion, tracker) {
- trackerStream.emit('change', name, completion, trackerStream)
- }
-}
+const PutOpts = figgyPudding({
+ algorithms: {
+ default: ['sha512']
+ },
+ integrity: {},
+ memoize: {},
+ metadata: {},
+ pickAlgorithm: {},
+ size: {},
+ tmpPrefix: {},
+ single: {},
+ sep: {},
+ error: {},
+ strict: {}
+})
-TrackerStream.prototype._transform = function (data, encoding, cb) {
- this.tracker.completeWork(data.length ? data.length : 1)
- this.push(data)
- cb()
+module.exports = putData
+function putData (cache, key, data, opts) {
+ opts = PutOpts(opts)
+ return write(cache, data, opts).then(res => {
+ return index.insert(
+ cache, key, res.integrity, opts.concat({ size: res.size })
+ ).then(entry => {
+ if (opts.memoize) {
+ memo.put(cache, entry, data, opts)
+ }
+ return res.integrity
+ })
+ })
}
-TrackerStream.prototype._flush = function (cb) {
- this.tracker.finish()
- cb()
+module.exports.stream = putStream
+function putStream (cache, key, opts) {
+ opts = PutOpts(opts)
+ let integrity
+ let size
+ const contentStream = write.stream(
+ cache, opts
+ ).on('integrity', int => {
+ integrity = int
+ }).on('size', s => {
+ size = s
+ })
+ let memoData
+ let memoTotal = 0
+ const stream = to((chunk, enc, cb) => {
+ contentStream.write(chunk, enc, () => {
+ if (opts.memoize) {
+ if (!memoData) { memoData = [] }
+ memoData.push(chunk)
+ memoTotal += chunk.length
+ }
+ cb()
+ })
+ }, cb => {
+ contentStream.end(() => {
+ index.insert(cache, key, integrity, opts.concat({ size })).then(entry => {
+ if (opts.memoize) {
+ memo.put(cache, entry, Buffer.concat(memoData, memoTotal), opts)
+ }
+ stream.emit('integrity', integrity)
+ cb()
+ })
+ })
+ })
+ let erred = false
+ stream.once('error', err => {
+ if (erred) { return }
+ erred = true
+ contentStream.emit('error', err)
+ })
+ contentStream.once('error', err => {
+ if (erred) { return }
+ erred = true
+ stream.emit('error', err)
+ })
+ return stream
}
-delegate(TrackerStream.prototype, 'tracker')
- .method('completed')
- .method('addWork')
- .method('finish')
-
/***/ }),
-/* 236 */,
-/* 237 */
+/* 155 */
/***/ (function(module) {
-module.exports = {"name":"make-fetch-happen","version":"5.0.2","description":"Opinionated, caching, retrying fetch client","main":"index.js","files":["*.js","lib"],"scripts":{"prerelease":"npm t","release":"standard-version -s","postrelease":"npm publish --tag=legacy && git push --follow-tags","pretest":"standard","test":"tap --coverage --nyc-arg=--all --timeout=35 -J test/*.js","update-coc":"weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'","update-contrib":"weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"},"repository":"https://github.com/zkat/make-fetch-happen","keywords":["http","request","fetch","mean girls","caching","cache","subresource integrity"],"author":{"name":"Kat Marchán","email":"kzm@zkat.tech","twitter":"maybekatz"},"license":"ISC","dependencies":{"agentkeepalive":"^3.4.1","cacache":"^12.0.0","http-cache-semantics":"^3.8.1","http-proxy-agent":"^2.1.0","https-proxy-agent":"^2.2.3","lru-cache":"^5.1.1","mississippi":"^3.0.0","node-fetch-npm":"^2.0.2","promise-retry":"^1.1.1","socks-proxy-agent":"^4.0.0","ssri":"^6.0.0"},"devDependencies":{"bluebird":"^3.5.1","mkdirp":"^0.5.1","nock":"^9.2.3","npmlog":"^4.1.2","require-inject":"^1.4.2","rimraf":"^2.6.2","safe-buffer":"^5.1.1","standard":"^11.0.1","standard-version":"^4.3.0","tacks":"^1.2.6","tap":"^12.7.0","weallbehave":"^1.0.0","weallcontribute":"^1.0.7"}};
-
-/***/ }),
-/* 238 */
-/***/ (function(__unusedmodule, exports) {
-
-"use strict";
+module.exports = extractDescription
+// Extracts description from contents of a readme file in markdown format
+function extractDescription (d) {
+ if (!d) return;
+ if (d === "ERROR: No README data found!") return;
+ // the first block of text before the first heading
+ // that isn't the first line heading
+ d = d.trim().split('\n')
+ for (var s = 0; d[s] && d[s].trim().match(/^(#|$)/); s ++);
+ var l = d.length
+ for (var e = s + 1; e < l && d[e].trim(); e ++);
+ return d.slice(s, e).join(' ').trim()
+}
-Object.defineProperty(exports, "__esModule", {
- value: true
-});
-exports.default = void 0;
-var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
-exports.default = _default;
/***/ }),
-/* 239 */
+/* 156 */
/***/ (function(module, __unusedexports, __webpack_require__) {
-"use strict";
-
-var validate = __webpack_require__(285)
-var renderTemplate = __webpack_require__(874)
-var wideTruncate = __webpack_require__(504)
-var stringWidth = __webpack_require__(66)
-
-module.exports = function (theme, width, completed) {
- validate('ONN', [theme, width, completed])
- if (completed < 0) completed = 0
- if (completed > 1) completed = 1
- if (width <= 0) return ''
- var sofar = Math.round(width * completed)
- var rest = width - sofar
- var template = [
- {type: 'complete', value: repeat(theme.complete, sofar), length: sofar},
- {type: 'remaining', value: repeat(theme.remaining, rest), length: rest}
- ]
- return renderTemplate(width, template, theme)
-}
-
-// lodash's way of repeating
-function repeat (string, width) {
- var result = ''
- var n = width
- do {
- if (n % 2) {
- result += string
- }
- n = Math.floor(n / 2)
- /*eslint no-self-assign: 0*/
- string += string
- } while (n && stringWidth(result) < width)
-
- return wideTruncate(result, width)
-}
+/**
+ * Module dependencies.
+ */
+var tls; // lazy-loaded...
+var url = __webpack_require__(835);
+var dns = __webpack_require__(819);
+var Agent = __webpack_require__(234);
+var SocksClient = __webpack_require__(198).SocksClient;
+var inherits = __webpack_require__(669).inherits;
-/***/ }),
-/* 240 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+/**
+ * Module exports.
+ */
-"use strict";
+module.exports = SocksProxyAgent;
-const LRU = __webpack_require__(567)
-const url = __webpack_require__(835)
+/**
+ * The `SocksProxyAgent`.
+ *
+ * @api public
+ */
-let AGENT_CACHE = new LRU({ max: 50 })
-let HttpsAgent
-let HttpAgent
+function SocksProxyAgent(opts) {
+ if (!(this instanceof SocksProxyAgent)) return new SocksProxyAgent(opts);
+ if ('string' == typeof opts) opts = url.parse(opts);
+ if (!opts)
+ throw new Error(
+ 'a SOCKS proxy server `host` and `port` must be specified!'
+ );
+ Agent.call(this, opts);
-module.exports = getAgent
+ var proxy = Object.assign({}, opts);
-function getAgent (uri, opts) {
- const parsedUri = url.parse(typeof uri === 'string' ? uri : uri.url)
- const isHttps = parsedUri.protocol === 'https:'
- const pxuri = getProxyUri(uri, opts)
+ // prefer `hostname` over `host`, because of `url.parse()`
+ proxy.host = proxy.hostname || proxy.host;
- const key = [
- `https:${isHttps}`,
- pxuri
- ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}`
- : '>no-proxy<',
- `local-address:${opts.localAddress || '>no-local-address<'}`,
- `strict-ssl:${isHttps ? !!opts.strictSSL : '>no-strict-ssl<'}`,
- `ca:${(isHttps && opts.ca) || '>no-ca<'}`,
- `cert:${(isHttps && opts.cert) || '>no-cert<'}`,
- `key:${(isHttps && opts.key) || '>no-key<'}`
- ].join(':')
+ // SOCKS doesn't *technically* have a default port, but this is
+ // the same default that `curl(1)` uses
+ proxy.port = +proxy.port || 1080;
- if (opts.agent != null) { // `agent: false` has special behavior!
- return opts.agent
+ if (proxy.host && proxy.path) {
+ // if both a `host` and `path` are specified then it's most likely the
+ // result of a `url.parse()` call... we need to remove the `path` portion so
+ // that `net.connect()` doesn't attempt to open that as a unix socket file.
+ delete proxy.path;
+ delete proxy.pathname;
}
- if (AGENT_CACHE.peek(key)) {
- return AGENT_CACHE.get(key)
+ // figure out if we want socks v4 or v5, based on the "protocol" used.
+ // Defaults to 5.
+ proxy.lookup = false;
+ switch (proxy.protocol) {
+ case 'socks4:':
+ proxy.lookup = true;
+ // pass through
+ case 'socks4a:':
+ proxy.version = 4;
+ break;
+ case 'socks5:':
+ proxy.lookup = true;
+ // pass through
+ case 'socks:': // no version specified, default to 5h
+ case 'socks5h:':
+ proxy.version = 5;
+ break;
+ default:
+ throw new TypeError(
+ 'A "socks" protocol must be specified! Got: ' + proxy.protocol
+ );
}
- if (pxuri) {
- const proxy = getProxy(pxuri, opts, isHttps)
- AGENT_CACHE.set(key, proxy)
- return proxy
+ if (proxy.auth) {
+ var auth = proxy.auth.split(':');
+ proxy.authentication = { username: auth[0], password: auth[1] };
+ proxy.userid = auth[0];
}
+ this.proxy = proxy;
+}
+inherits(SocksProxyAgent, Agent);
- if (isHttps && !HttpsAgent) {
- HttpsAgent = __webpack_require__(112).HttpsAgent
- } else if (!isHttps && !HttpAgent) {
- HttpAgent = __webpack_require__(112)
- }
+/**
+ * Initiates a SOCKS connection to the specified SOCKS proxy server,
+ * which in turn connects to the specified remote host and port.
+ *
+ * @api public
+ */
- // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout
- // of zero disables the timeout behavior (OS limits still apply). Else, if
- // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that
- // the node-fetch-npm timeout will always fire first, giving us more
- // consistent errors.
- const agentTimeout = opts.timeout === 0 ? 0 : opts.timeout + 1
+SocksProxyAgent.prototype.callback = function connect(req, opts, fn) {
+ var proxy = this.proxy;
- const agent = isHttps ? new HttpsAgent({
- maxSockets: opts.maxSockets || 15,
- ca: opts.ca,
- cert: opts.cert,
- key: opts.key,
- localAddress: opts.localAddress,
- rejectUnauthorized: opts.strictSSL,
- timeout: agentTimeout
- }) : new HttpAgent({
- maxSockets: opts.maxSockets || 15,
- localAddress: opts.localAddress,
- timeout: agentTimeout
- })
- AGENT_CACHE.set(key, agent)
- return agent
-}
+ // called once the SOCKS proxy has connected to the specified remote endpoint
+ function onhostconnect(err, result) {
+ if (err) return fn(err);
-function checkNoProxy (uri, opts) {
- const host = url.parse(uri).hostname.split('.').reverse()
- let noproxy = (opts.noProxy || getProcessEnv('no_proxy'))
- if (typeof noproxy === 'string') {
- noproxy = noproxy.split(/\s*,\s*/g)
- }
- return noproxy && noproxy.some(no => {
- const noParts = no.split('.').filter(x => x).reverse()
- if (!noParts.length) { return false }
- for (let i = 0; i < noParts.length; i++) {
- if (host[i] !== noParts[i]) {
- return false
- }
+ var socket = result.socket;
+
+ var s = socket;
+ if (opts.secureEndpoint) {
+ // since the proxy is connecting to an SSL server, we have
+ // to upgrade this socket connection to an SSL connection
+ if (!tls) tls = __webpack_require__(16);
+ opts.socket = socket;
+ opts.servername = opts.host;
+ opts.host = null;
+ opts.hostname = null;
+ opts.port = null;
+ s = tls.connect(opts);
}
- return true
- })
-}
-module.exports.getProcessEnv = getProcessEnv
+ fn(null, s);
+ }
-function getProcessEnv (env) {
- if (!env) { return }
+ // called for the `dns.lookup()` callback
+ function onlookup(err, ip) {
+ if (err) return fn(err);
+ options.destination.host = ip;
+ SocksClient.createConnection(options, onhostconnect);
+ }
- let value
+ var options = {
+ proxy: {
+ ipaddress: proxy.host,
+ port: +proxy.port,
+ type: proxy.version
+ },
+ destination: {
+ port: +opts.port
+ },
+ command: 'connect'
+ };
- if (Array.isArray(env)) {
- for (let e of env) {
- value = process.env[e] ||
- process.env[e.toUpperCase()] ||
- process.env[e.toLowerCase()]
- if (typeof value !== 'undefined') { break }
- }
+ if (proxy.authentication) {
+ options.proxy.userId = proxy.userid;
+ options.proxy.password = proxy.authentication.password;
}
- if (typeof env === 'string') {
- value = process.env[env] ||
- process.env[env.toUpperCase()] ||
- process.env[env.toLowerCase()]
+ if (proxy.lookup) {
+ // client-side DNS resolution for "4" and "5" socks proxy versions
+ dns.lookup(opts.host, onlookup);
+ } else {
+ // proxy hostname DNS resolution for "4a" and "5h" socks proxy servers
+ onlookup(null, opts.host);
}
-
- return value
}
-function getProxyUri (uri, opts) {
- const protocol = url.parse(uri).protocol
-
- const proxy = opts.proxy || (
- protocol === 'https:' && getProcessEnv('https_proxy')
- ) || (
- protocol === 'http:' && getProcessEnv(['https_proxy', 'http_proxy', 'proxy'])
- )
- if (!proxy) { return null }
- const parsedProxy = (typeof proxy === 'string') ? url.parse(proxy) : proxy
+/***/ }),
+/* 157 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- return !checkNoProxy(uri, opts) && parsedProxy
-}
+var async = __webpack_require__(751)
+ , abort = __webpack_require__(566)
+ ;
-let HttpProxyAgent
-let HttpsProxyAgent
-let SocksProxyAgent
-function getProxy (proxyUrl, opts, isHttps) {
- let popts = {
- host: proxyUrl.hostname,
- port: proxyUrl.port,
- protocol: proxyUrl.protocol,
- path: proxyUrl.path,
- auth: proxyUrl.auth,
- ca: opts.ca,
- cert: opts.cert,
- key: opts.key,
- timeout: opts.timeout === 0 ? 0 : opts.timeout + 1,
- localAddress: opts.localAddress,
- maxSockets: opts.maxSockets || 15,
- rejectUnauthorized: opts.strictSSL
- }
+// API
+module.exports = iterate;
- if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') {
- if (!isHttps) {
- if (!HttpProxyAgent) {
- HttpProxyAgent = __webpack_require__(934)
- }
+/**
+ * Iterates over each job object
+ *
+ * @param {array|object} list - array or object (named list) to iterate over
+ * @param {function} iterator - iterator to run
+ * @param {object} state - current job status
+ * @param {function} callback - invoked when all elements processed
+ */
+function iterate(list, iterator, state, callback)
+{
+ // store current index
+ var key = state['keyedList'] ? state['keyedList'][state.index] : state.index;
- return new HttpProxyAgent(popts)
- } else {
- if (!HttpsProxyAgent) {
- HttpsProxyAgent = __webpack_require__(717)
- }
+ state.jobs[key] = runJob(iterator, key, list[key], function(error, output)
+ {
+ // don't repeat yourself
+ // skip secondary callbacks
+ if (!(key in state.jobs))
+ {
+ return;
+ }
- return new HttpsProxyAgent(popts)
+ // clean up jobs
+ delete state.jobs[key];
+
+ if (error)
+ {
+ // don't process rest of the results
+ // stop still active jobs
+ // and reset the list
+ abort(state);
}
- }
- if (proxyUrl.protocol.startsWith('socks')) {
- if (!SocksProxyAgent) {
- SocksProxyAgent = __webpack_require__(310)
+ else
+ {
+ state.results[key] = output;
}
- return new SocksProxyAgent(popts)
+ // return salvaged results
+ callback(error, state.results);
+ });
+}
+
+/**
+ * Runs iterator over provided job element
+ *
+ * @param {function} iterator - iterator to invoke
+ * @param {string|number} key - key/index of the element in the list of jobs
+ * @param {mixed} item - job description
+ * @param {function} callback - invoked after iterator is done with the job
+ * @returns {function|mixed} - job abort function or something else
+ */
+function runJob(iterator, key, item, callback)
+{
+ var aborter;
+
+ // allow shortcut if iterator expects only two arguments
+ if (iterator.length == 2)
+ {
+ aborter = iterator(item, async(callback));
+ }
+ // otherwise go with full three arguments
+ else
+ {
+ aborter = iterator(item, key, async(callback));
}
+
+ return aborter;
}
/***/ }),
-/* 241 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+/* 158 */
+/***/ (function(__unusedmodule, exports) {
"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=Time.js.map
-Object.defineProperty(exports, "__esModule", {
- value: true
-});
-exports.default = _default;
-exports.URL = exports.DNS = void 0;
+/***/ }),
+/* 159 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-var _stringify = _interopRequireDefault(__webpack_require__(855));
+"use strict";
-var _parse = _interopRequireDefault(__webpack_require__(197));
-function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+// tar -c
+const hlo = __webpack_require__(891)
-function stringToBytes(str) {
- str = unescape(encodeURIComponent(str)); // UTF8 escape
+const Pack = __webpack_require__(415)
+const fs = __webpack_require__(747)
+const fsm = __webpack_require__(827)
+const t = __webpack_require__(579)
+const path = __webpack_require__(622)
- const bytes = [];
+const c = module.exports = (opt_, files, cb) => {
+ if (typeof files === 'function')
+ cb = files
- for (let i = 0; i < str.length; ++i) {
- bytes.push(str.charCodeAt(i));
- }
+ if (Array.isArray(opt_))
+ files = opt_, opt_ = {}
- return bytes;
-}
+ if (!files || !Array.isArray(files) || !files.length)
+ throw new TypeError('no files or directories specified')
-const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
-exports.DNS = DNS;
-const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
-exports.URL = URL;
+ files = Array.from(files)
-function _default(name, version, hashfunc) {
- function generateUUID(value, namespace, buf, offset) {
- if (typeof value === 'string') {
- value = stringToBytes(value);
- }
+ const opt = hlo(opt_)
- if (typeof namespace === 'string') {
- namespace = (0, _parse.default)(namespace);
- }
+ if (opt.sync && typeof cb === 'function')
+ throw new TypeError('callback not supported for sync tar functions')
- if (namespace.length !== 16) {
- throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
- } // Compute hash of namespace and value, Per 4.3
- // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
- // hashfunc([...namespace, ... value])`
+ if (!opt.file && typeof cb === 'function')
+ throw new TypeError('callback only supported with file option')
+ return opt.file && opt.sync ? createFileSync(opt, files)
+ : opt.file ? createFile(opt, files, cb)
+ : opt.sync ? createSync(opt, files)
+ : create(opt, files)
+}
- let bytes = new Uint8Array(16 + value.length);
- bytes.set(namespace);
- bytes.set(value, namespace.length);
- bytes = hashfunc(bytes);
- bytes[6] = bytes[6] & 0x0f | version;
- bytes[8] = bytes[8] & 0x3f | 0x80;
+const createFileSync = (opt, files) => {
+ const p = new Pack.Sync(opt)
+ const stream = new fsm.WriteStreamSync(opt.file, {
+ mode: opt.mode || 0o666
+ })
+ p.pipe(stream)
+ addFilesSync(p, files)
+}
- if (buf) {
- offset = offset || 0;
+const createFile = (opt, files, cb) => {
+ const p = new Pack(opt)
+ const stream = new fsm.WriteStream(opt.file, {
+ mode: opt.mode || 0o666
+ })
+ p.pipe(stream)
- for (let i = 0; i < 16; ++i) {
- buf[offset + i] = bytes[i];
- }
+ const promise = new Promise((res, rej) => {
+ stream.on('error', rej)
+ stream.on('close', res)
+ p.on('error', rej)
+ })
- return buf;
- }
+ addFilesAsync(p, files)
- return (0, _stringify.default)(bytes);
- } // Function#name is not settable on some platforms (#270)
+ return cb ? promise.then(cb, cb) : promise
+}
+const addFilesSync = (p, files) => {
+ files.forEach(file => {
+ if (file.charAt(0) === '@')
+ t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ sync: true,
+ noResume: true,
+ onentry: entry => p.add(entry)
+ })
+ else
+ p.add(file)
+ })
+ p.end()
+}
- try {
- generateUUID.name = name; // eslint-disable-next-line no-empty
- } catch (err) {} // For CommonJS default export support
+const addFilesAsync = (p, files) => {
+ while (files.length) {
+ const file = files.shift()
+ if (file.charAt(0) === '@')
+ return t({
+ file: path.resolve(p.cwd, file.substr(1)),
+ noResume: true,
+ onentry: entry => p.add(entry)
+ }).then(_ => addFilesAsync(p, files))
+ else
+ p.add(file)
+ }
+ p.end()
+}
+const createSync = (opt, files) => {
+ const p = new Pack.Sync(opt)
+ addFilesSync(p, files)
+ return p
+}
- generateUUID.DNS = DNS;
- generateUUID.URL = URL;
- return generateUUID;
+const create = (opt, files) => {
+ const p = new Pack(opt)
+ addFilesAsync(p, files)
+ return p
}
-/***/ }),
-/* 242 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-"use strict";
+/***/ }),
+/* 160 */,
+/* 161 */
+/***/ (function(module, exports, __webpack_require__) {
-Object.defineProperty(exports, "__esModule", { value: true });
-__webpack_require__(71);
+/**
+ * Module dependencies.
+ */
+var tty = __webpack_require__(867);
+var util = __webpack_require__(669);
-/***/ }),
-/* 243 */
-/***/ (function(module) {
+/**
+ * This is the Node.js implementation of `debug()`.
+ *
+ * Expose `debug()` as the module.
+ */
-"use strict";
+exports = module.exports = __webpack_require__(778);
+exports.init = init;
+exports.log = log;
+exports.formatArgs = formatArgs;
+exports.save = save;
+exports.load = load;
+exports.useColors = useColors;
+/**
+ * Colors.
+ */
-module.exports = isWin32() || isColorTerm()
+exports.colors = [ 6, 2, 3, 4, 5, 1 ];
-function isWin32 () {
- return process.platform === 'win32'
+try {
+ var supportsColor = __webpack_require__(247);
+ if (supportsColor && supportsColor.level >= 2) {
+ exports.colors = [
+ 20, 21, 26, 27, 32, 33, 38, 39, 40, 41, 42, 43, 44, 45, 56, 57, 62, 63, 68,
+ 69, 74, 75, 76, 77, 78, 79, 80, 81, 92, 93, 98, 99, 112, 113, 128, 129, 134,
+ 135, 148, 149, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171,
+ 172, 173, 178, 179, 184, 185, 196, 197, 198, 199, 200, 201, 202, 203, 204,
+ 205, 206, 207, 208, 209, 214, 215, 220, 221
+ ];
+ }
+} catch (err) {
+ // swallow - we only care if `supports-color` is available; it doesn't have to be.
}
-function isColorTerm () {
- var termHasColor = /^screen|^xterm|^vt100|color|ansi|cygwin|linux/i
- return !!process.env.COLORTERM || termHasColor.test(process.env.TERM)
+/**
+ * Build up the default `inspectOpts` object from the environment variables.
+ *
+ * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
+ */
+
+exports.inspectOpts = Object.keys(process.env).filter(function (key) {
+ return /^debug_/i.test(key);
+}).reduce(function (obj, key) {
+ // camel-case
+ var prop = key
+ .substring(6)
+ .toLowerCase()
+ .replace(/_([a-z])/g, function (_, k) { return k.toUpperCase() });
+
+ // coerce string value into JS value
+ var val = process.env[key];
+ if (/^(yes|on|true|enabled)$/i.test(val)) val = true;
+ else if (/^(no|off|false|disabled)$/i.test(val)) val = false;
+ else if (val === 'null') val = null;
+ else val = Number(val);
+
+ obj[prop] = val;
+ return obj;
+}, {});
+
+/**
+ * Is stdout a TTY? Colored output is enabled when `true`.
+ */
+
+function useColors() {
+ return 'colors' in exports.inspectOpts
+ ? Boolean(exports.inspectOpts.colors)
+ : tty.isatty(process.stderr.fd);
}
+/**
+ * Map %o to `util.inspect()`, all on a single line.
+ */
-/***/ }),
-/* 244 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+exports.formatters.o = function(v) {
+ this.inspectOpts.colors = this.useColors;
+ return util.inspect(v, this.inspectOpts)
+ .split('\n').map(function(str) {
+ return str.trim()
+ }).join(' ');
+};
-"use strict";
+/**
+ * Map %o to `util.inspect()`, allowing multiple lines if needed.
+ */
+exports.formatters.O = function(v) {
+ this.inspectOpts.colors = this.useColors;
+ return util.inspect(v, this.inspectOpts);
+};
-module.exports = __webpack_require__(442)
+/**
+ * Adds ANSI color escape codes if enabled.
+ *
+ * @api public
+ */
+function formatArgs(args) {
+ var name = this.namespace;
+ var useColors = this.useColors;
-/***/ }),
-/* 245 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+ if (useColors) {
+ var c = this.color;
+ var colorCode = '\u001b[3' + (c < 8 ? c : '8;5;' + c);
+ var prefix = ' ' + colorCode + ';1m' + name + ' ' + '\u001b[0m';
-"use strict";
+ args[0] = prefix + args[0].split('\n').join('\n' + prefix);
+ args.push(colorCode + 'm+' + exports.humanize(this.diff) + '\u001b[0m');
+ } else {
+ args[0] = getDate() + name + ' ' + args[0];
+ }
+}
+function getDate() {
+ if (exports.inspectOpts.hideDate) {
+ return '';
+ } else {
+ return new Date().toISOString() + ' ';
+ }
+}
-Object.defineProperty(exports, "__esModule", {
- value: true
-});
-exports.default = void 0;
+/**
+ * Invokes `util.format()` with the specified arguments and writes to stderr.
+ */
-var _crypto = _interopRequireDefault(__webpack_require__(417));
+function log() {
+ return process.stderr.write(util.format.apply(util, arguments) + '\n');
+}
-function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+/**
+ * Save `namespaces`.
+ *
+ * @param {String} namespaces
+ * @api private
+ */
-function md5(bytes) {
- if (Array.isArray(bytes)) {
- bytes = Buffer.from(bytes);
- } else if (typeof bytes === 'string') {
- bytes = Buffer.from(bytes, 'utf8');
+function save(namespaces) {
+ if (null == namespaces) {
+ // If you set a process.env field to null or undefined, it gets cast to the
+ // string 'null' or 'undefined'. Just delete instead.
+ delete process.env.DEBUG;
+ } else {
+ process.env.DEBUG = namespaces;
}
+}
- return _crypto.default.createHash('md5').update(bytes).digest();
+/**
+ * Load `namespaces`.
+ *
+ * @return {String} returns the previously persisted debug modes
+ * @api private
+ */
+
+function load() {
+ return process.env.DEBUG;
}
-var _default = md5;
-exports.default = _default;
+/**
+ * Init logic for `debug` instances.
+ *
+ * Create a new `inspectOpts` object in case `useColors` is set
+ * differently for a particular `debug` instance.
+ */
+
+function init (debug) {
+ debug.inspectOpts = {};
+
+ var keys = Object.keys(exports.inspectOpts);
+ for (var i = 0; i < keys.length; i++) {
+ debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
+ }
+}
+
+/**
+ * Enable namespaces listed in `process.env.DEBUG` initially.
+ */
+
+exports.enable(load());
+
/***/ }),
-/* 246 */
+/* 162 */
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-module.exports = function(Promise, INTERNAL, tryConvertToPromise,
- apiRejection, Proxyable) {
-var util = __webpack_require__(248);
-var isArray = util.isArray;
+var Buffer = __webpack_require__(215).Buffer;
-function toResolutionValue(val) {
- switch(val) {
- case -2: return [];
- case -3: return {};
- case -6: return new Map();
- }
-}
+// Export Node.js internal encodings.
-function PromiseArray(values) {
- var promise = this._promise = new Promise(INTERNAL);
- if (values instanceof Promise) {
- promise._propagateFrom(values, 3);
- values.suppressUnhandledRejections();
- }
- promise._setOnCancel(this);
- this._values = values;
- this._length = 0;
- this._totalResolved = 0;
- this._init(undefined, -2);
-}
-util.inherits(PromiseArray, Proxyable);
+module.exports = {
+ // Encodings
+ utf8: { type: "_internal", bomAware: true},
+ cesu8: { type: "_internal", bomAware: true},
+ unicode11utf8: "utf8",
-PromiseArray.prototype.length = function () {
- return this._length;
-};
+ ucs2: { type: "_internal", bomAware: true},
+ utf16le: "ucs2",
-PromiseArray.prototype.promise = function () {
- return this._promise;
+ binary: { type: "_internal" },
+ base64: { type: "_internal" },
+ hex: { type: "_internal" },
+
+ // Codec.
+ _internal: InternalCodec,
};
-PromiseArray.prototype._init = function init(_, resolveValueIfEmpty) {
- var values = tryConvertToPromise(this._values, this._promise);
- if (values instanceof Promise) {
- values = values._target();
- var bitField = values._bitField;
- ;
- this._values = values;
+//------------------------------------------------------------------------------
- if (((bitField & 50397184) === 0)) {
- this._promise._setAsyncGuaranteed();
- return values._then(
- init,
- this._reject,
- undefined,
- this,
- resolveValueIfEmpty
- );
- } else if (((bitField & 33554432) !== 0)) {
- values = values._value();
- } else if (((bitField & 16777216) !== 0)) {
- return this._reject(values._reason());
- } else {
- return this._cancel();
- }
- }
- values = util.asArray(values);
- if (values === null) {
- var err = apiRejection(
- "expecting an array or an iterable object but got " + util.classString(values)).reason();
- this._promise._rejectCallback(err, false);
- return;
- }
+function InternalCodec(codecOptions, iconv) {
+ this.enc = codecOptions.encodingName;
+ this.bomAware = codecOptions.bomAware;
- if (values.length === 0) {
- if (resolveValueIfEmpty === -5) {
- this._resolveEmptyArray();
- }
- else {
- this._resolve(toResolutionValue(resolveValueIfEmpty));
+ if (this.enc === "base64")
+ this.encoder = InternalEncoderBase64;
+ else if (this.enc === "cesu8") {
+ this.enc = "utf8"; // Use utf8 for decoding.
+ this.encoder = InternalEncoderCesu8;
+
+ // Add decoder for versions of Node not supporting CESU-8
+ if (Buffer.from('eda0bdedb2a9', 'hex').toString() !== '💩') {
+ this.decoder = InternalDecoderCesu8;
+ this.defaultCharUnicode = iconv.defaultCharUnicode;
}
- return;
}
- this._iterate(values);
-};
-
-PromiseArray.prototype._iterate = function(values) {
- var len = this.getActualLength(values.length);
- this._length = len;
- this._values = this.shouldCopyValues() ? new Array(len) : this._values;
- var result = this._promise;
- var isResolved = false;
- var bitField = null;
- for (var i = 0; i < len; ++i) {
- var maybePromise = tryConvertToPromise(values[i], result);
+}
- if (maybePromise instanceof Promise) {
- maybePromise = maybePromise._target();
- bitField = maybePromise._bitField;
- } else {
- bitField = null;
- }
+InternalCodec.prototype.encoder = InternalEncoder;
+InternalCodec.prototype.decoder = InternalDecoder;
- if (isResolved) {
- if (bitField !== null) {
- maybePromise.suppressUnhandledRejections();
- }
- } else if (bitField !== null) {
- if (((bitField & 50397184) === 0)) {
- maybePromise._proxy(this, i);
- this._values[i] = maybePromise;
- } else if (((bitField & 33554432) !== 0)) {
- isResolved = this._promiseFulfilled(maybePromise._value(), i);
- } else if (((bitField & 16777216) !== 0)) {
- isResolved = this._promiseRejected(maybePromise._reason(), i);
- } else {
- isResolved = this._promiseCancelled(i);
- }
- } else {
- isResolved = this._promiseFulfilled(maybePromise, i);
- }
- }
- if (!isResolved) result._setAsyncGuaranteed();
-};
+//------------------------------------------------------------------------------
-PromiseArray.prototype._isResolved = function () {
- return this._values === null;
-};
+// We use node.js internal decoder. Its signature is the same as ours.
+var StringDecoder = __webpack_require__(304).StringDecoder;
-PromiseArray.prototype._resolve = function (value) {
- this._values = null;
- this._promise._fulfill(value);
-};
+if (!StringDecoder.prototype.end) // Node v0.8 doesn't have this method.
+ StringDecoder.prototype.end = function() {};
-PromiseArray.prototype._cancel = function() {
- if (this._isResolved() || !this._promise._isCancellable()) return;
- this._values = null;
- this._promise._cancel();
-};
-PromiseArray.prototype._reject = function (reason) {
- this._values = null;
- this._promise._rejectCallback(reason, false);
-};
+function InternalDecoder(options, codec) {
+ this.decoder = new StringDecoder(codec.enc);
+}
-PromiseArray.prototype._promiseFulfilled = function (value, index) {
- this._values[index] = value;
- var totalResolved = ++this._totalResolved;
- if (totalResolved >= this._length) {
- this._resolve(this._values);
- return true;
+InternalDecoder.prototype.write = function(buf) {
+ if (!Buffer.isBuffer(buf)) {
+ buf = Buffer.from(buf);
}
- return false;
-};
-PromiseArray.prototype._promiseCancelled = function() {
- this._cancel();
- return true;
-};
+ return this.decoder.write(buf);
+}
-PromiseArray.prototype._promiseRejected = function (reason) {
- this._totalResolved++;
- this._reject(reason);
- return true;
-};
+InternalDecoder.prototype.end = function() {
+ return this.decoder.end();
+}
-PromiseArray.prototype._resultCancelled = function() {
- if (this._isResolved()) return;
- var values = this._values;
- this._cancel();
- if (values instanceof Promise) {
- values.cancel();
- } else {
- for (var i = 0; i < values.length; ++i) {
- if (values[i] instanceof Promise) {
- values[i].cancel();
- }
- }
- }
-};
-PromiseArray.prototype.shouldCopyValues = function () {
- return true;
-};
+//------------------------------------------------------------------------------
+// Encoder is mostly trivial
-PromiseArray.prototype.getActualLength = function (len) {
- return len;
-};
+function InternalEncoder(options, codec) {
+ this.enc = codec.enc;
+}
-return PromiseArray;
-};
+InternalEncoder.prototype.write = function(str) {
+ return Buffer.from(str, this.enc);
+}
+InternalEncoder.prototype.end = function() {
+}
-/***/ }),
-/* 247 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-"use strict";
+//------------------------------------------------------------------------------
+// Except base64 encoder, which must keep its state.
-const os = __webpack_require__(87);
-const tty = __webpack_require__(867);
-const hasFlag = __webpack_require__(364);
+function InternalEncoderBase64(options, codec) {
+ this.prevStr = '';
+}
-const {env} = process;
+InternalEncoderBase64.prototype.write = function(str) {
+ str = this.prevStr + str;
+ var completeQuads = str.length - (str.length % 4);
+ this.prevStr = str.slice(completeQuads);
+ str = str.slice(0, completeQuads);
-let forceColor;
-if (hasFlag('no-color') ||
- hasFlag('no-colors') ||
- hasFlag('color=false') ||
- hasFlag('color=never')) {
- forceColor = 0;
-} else if (hasFlag('color') ||
- hasFlag('colors') ||
- hasFlag('color=true') ||
- hasFlag('color=always')) {
- forceColor = 1;
+ return Buffer.from(str, "base64");
}
-if ('FORCE_COLOR' in env) {
- if (env.FORCE_COLOR === 'true') {
- forceColor = 1;
- } else if (env.FORCE_COLOR === 'false') {
- forceColor = 0;
- } else {
- forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3);
- }
+InternalEncoderBase64.prototype.end = function() {
+ return Buffer.from(this.prevStr, "base64");
}
-function translateLevel(level) {
- if (level === 0) {
- return false;
- }
-
- return {
- level,
- hasBasic: true,
- has256: level >= 2,
- has16m: level >= 3
- };
-}
-function supportsColor(haveStream, streamIsTTY) {
- if (forceColor === 0) {
- return 0;
- }
+//------------------------------------------------------------------------------
+// CESU-8 encoder is also special.
- if (hasFlag('color=16m') ||
- hasFlag('color=full') ||
- hasFlag('color=truecolor')) {
- return 3;
- }
+function InternalEncoderCesu8(options, codec) {
+}
- if (hasFlag('color=256')) {
- return 2;
- }
+InternalEncoderCesu8.prototype.write = function(str) {
+ var buf = Buffer.alloc(str.length * 3), bufIdx = 0;
+ for (var i = 0; i < str.length; i++) {
+ var charCode = str.charCodeAt(i);
+ // Naive implementation, but it works because CESU-8 is especially easy
+ // to convert from UTF-16 (which all JS strings are encoded in).
+ if (charCode < 0x80)
+ buf[bufIdx++] = charCode;
+ else if (charCode < 0x800) {
+ buf[bufIdx++] = 0xC0 + (charCode >>> 6);
+ buf[bufIdx++] = 0x80 + (charCode & 0x3f);
+ }
+ else { // charCode will always be < 0x10000 in javascript.
+ buf[bufIdx++] = 0xE0 + (charCode >>> 12);
+ buf[bufIdx++] = 0x80 + ((charCode >>> 6) & 0x3f);
+ buf[bufIdx++] = 0x80 + (charCode & 0x3f);
+ }
+ }
+ return buf.slice(0, bufIdx);
+}
- if (haveStream && !streamIsTTY && forceColor === undefined) {
- return 0;
- }
+InternalEncoderCesu8.prototype.end = function() {
+}
- const min = forceColor || 0;
+//------------------------------------------------------------------------------
+// CESU-8 decoder is not implemented in Node v4.0+
- if (env.TERM === 'dumb') {
- return min;
- }
+function InternalDecoderCesu8(options, codec) {
+ this.acc = 0;
+ this.contBytes = 0;
+ this.accBytes = 0;
+ this.defaultCharUnicode = codec.defaultCharUnicode;
+}
- if (process.platform === 'win32') {
- // Windows 10 build 10586 is the first Windows release that supports 256 colors.
- // Windows 10 build 14931 is the first release that supports 16m/TrueColor.
- const osRelease = os.release().split('.');
- if (
- Number(osRelease[0]) >= 10 &&
- Number(osRelease[2]) >= 10586
- ) {
- return Number(osRelease[2]) >= 14931 ? 3 : 2;
- }
+InternalDecoderCesu8.prototype.write = function(buf) {
+ var acc = this.acc, contBytes = this.contBytes, accBytes = this.accBytes,
+ res = '';
+ for (var i = 0; i < buf.length; i++) {
+ var curByte = buf[i];
+ if ((curByte & 0xC0) !== 0x80) { // Leading byte
+ if (contBytes > 0) { // Previous code is invalid
+ res += this.defaultCharUnicode;
+ contBytes = 0;
+ }
- return 1;
- }
+ if (curByte < 0x80) { // Single-byte code
+ res += String.fromCharCode(curByte);
+ } else if (curByte < 0xE0) { // Two-byte code
+ acc = curByte & 0x1F;
+ contBytes = 1; accBytes = 1;
+ } else if (curByte < 0xF0) { // Three-byte code
+ acc = curByte & 0x0F;
+ contBytes = 2; accBytes = 1;
+ } else { // Four or more are not supported for CESU-8.
+ res += this.defaultCharUnicode;
+ }
+ } else { // Continuation byte
+ if (contBytes > 0) { // We're waiting for it.
+ acc = (acc << 6) | (curByte & 0x3f);
+ contBytes--; accBytes++;
+ if (contBytes === 0) {
+ // Check for overlong encoding, but support Modified UTF-8 (encoding NULL as C0 80)
+ if (accBytes === 2 && acc < 0x80 && acc > 0)
+ res += this.defaultCharUnicode;
+ else if (accBytes === 3 && acc < 0x800)
+ res += this.defaultCharUnicode;
+ else
+ // Actually add character.
+ res += String.fromCharCode(acc);
+ }
+ } else { // Unexpected continuation byte
+ res += this.defaultCharUnicode;
+ }
+ }
+ }
+ this.acc = acc; this.contBytes = contBytes; this.accBytes = accBytes;
+ return res;
+}
- if ('CI' in env) {
- if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {
- return 1;
- }
+InternalDecoderCesu8.prototype.end = function() {
+ var res = 0;
+ if (this.contBytes > 0)
+ res += this.defaultCharUnicode;
+ return res;
+}
- return min;
- }
- if ('TEAMCITY_VERSION' in env) {
- return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;
- }
+/***/ }),
+/* 163 */,
+/* 164 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- if (env.COLORTERM === 'truecolor') {
- return 3;
- }
+const SemVer = __webpack_require__(206)
+const Range = __webpack_require__(124)
+const gt = __webpack_require__(486)
- if ('TERM_PROGRAM' in env) {
- const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);
+const minVersion = (range, loose) => {
+ range = new Range(range, loose)
- switch (env.TERM_PROGRAM) {
- case 'iTerm.app':
- return version >= 3 ? 3 : 2;
- case 'Apple_Terminal':
- return 2;
- // No default
- }
- }
+ let minver = new SemVer('0.0.0')
+ if (range.test(minver)) {
+ return minver
+ }
- if (/-256(color)?$/i.test(env.TERM)) {
- return 2;
- }
+ minver = new SemVer('0.0.0-0')
+ if (range.test(minver)) {
+ return minver
+ }
- if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {
- return 1;
- }
+ minver = null
+ for (let i = 0; i < range.set.length; ++i) {
+ const comparators = range.set[i]
- if ('COLORTERM' in env) {
- return 1;
- }
+ let setMin = null
+ comparators.forEach((comparator) => {
+ // Clone to avoid manipulating the comparator's semver object.
+ const compver = new SemVer(comparator.semver.version)
+ switch (comparator.operator) {
+ case '>':
+ if (compver.prerelease.length === 0) {
+ compver.patch++
+ } else {
+ compver.prerelease.push(0)
+ }
+ compver.raw = compver.format()
+ /* fallthrough */
+ case '':
+ case '>=':
+ if (!setMin || gt(compver, setMin)) {
+ setMin = compver
+ }
+ break
+ case '<':
+ case '<=':
+ /* Ignore maximum versions */
+ break
+ /* istanbul ignore next */
+ default:
+ throw new Error(`Unexpected operation: ${comparator.operator}`)
+ }
+ })
+ if (setMin && (!minver || gt(minver, setMin)))
+ minver = setMin
+ }
- return min;
-}
+ if (minver && range.test(minver)) {
+ return minver
+ }
-function getSupportLevel(stream) {
- const level = supportsColor(stream, stream && stream.isTTY);
- return translateLevel(level);
+ return null
}
-
-module.exports = {
- supportsColor: getSupportLevel,
- stdout: translateLevel(supportsColor(true, tty.isatty(1))),
- stderr: translateLevel(supportsColor(true, tty.isatty(2)))
-};
+module.exports = minVersion
/***/ }),
-/* 248 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+/* 165 */
+/***/ (function(__unusedmodule, exports) {
"use strict";
-var es5 = __webpack_require__(883);
-var canEvaluate = typeof navigator == "undefined";
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=Plugin.js.map
-var errorObj = {e: {}};
-var tryCatchTarget;
-var globalObject = typeof self !== "undefined" ? self :
- typeof window !== "undefined" ? window :
- typeof global !== "undefined" ? global :
- this !== undefined ? this : null;
+/***/ }),
+/* 166 */,
+/* 167 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-function tryCatcher() {
- try {
- var target = tryCatchTarget;
- tryCatchTarget = null;
- return target.apply(this, arguments);
- } catch (e) {
- errorObj.e = e;
- return errorObj;
- }
-}
-function tryCatch(fn) {
- tryCatchTarget = fn;
- return tryCatcher;
-}
+const compare = __webpack_require__(874)
+const gte = (a, b, loose) => compare(a, b, loose) >= 0
+module.exports = gte
-var inherits = function(Child, Parent) {
- var hasProp = {}.hasOwnProperty;
- function T() {
- this.constructor = Child;
- this.constructor$ = Parent;
- for (var propertyName in Parent.prototype) {
- if (hasProp.call(Parent.prototype, propertyName) &&
- propertyName.charAt(propertyName.length-1) !== "$"
- ) {
- this[propertyName + "$"] = Parent.prototype[propertyName];
- }
- }
- }
- T.prototype = Parent.prototype;
- Child.prototype = new T();
- return Child.prototype;
-};
+/***/ }),
+/* 168 */,
+/* 169 */
+/***/ (function(module) {
+"use strict";
-function isPrimitive(val) {
- return val == null || val === true || val === false ||
- typeof val === "string" || typeof val === "number";
+/*
+ * Role
+ *
+ * A Role encapsulates a particular object's 'role' in a method's
+ * dispatch. They are added directly to the selector for a method, and thus
+ * do not prevent the objects a method was defined on from being garbage
+ * collected.
+ */
+module.exports = Role
+function Role (method, position) {
+ this.method = method
+ this.position = position
}
-function isObject(value) {
- return typeof value === "function" ||
- typeof value === "object" && value !== null;
-}
+Role.roleKeyName = Symbol('roles')
-function maybeWrapAsError(maybeError) {
- if (!isPrimitive(maybeError)) return maybeError;
- return new Error(safeToString(maybeError));
-}
+/***/ }),
+/* 170 */,
+/* 171 */,
+/* 172 */,
+/* 173 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
-function withAppended(target, appendee) {
- var len = target.length;
- var ret = new Array(len + 1);
- var i;
- for (i = 0; i < len; ++i) {
- ret[i] = target[i];
- }
- ret[i] = appendee;
- return ret;
-}
+"use strict";
-function getDataPropertyOrDefault(obj, key, defaultValue) {
- if (es5.isES5) {
- var desc = Object.getOwnPropertyDescriptor(obj, key);
- if (desc != null) {
- return desc.get == null && desc.set == null
- ? desc.value
- : defaultValue;
- }
- } else {
- return {}.hasOwnProperty.call(obj, key) ? obj[key] : undefined;
- }
-}
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
-function notEnumerableProp(obj, name, value) {
- if (isPrimitive(obj)) return obj;
- var descriptor = {
- value: value,
- configurable: true,
- enumerable: false,
- writable: true
- };
- es5.defineProperty(obj, name, descriptor);
- return obj;
-}
+var _rng = _interopRequireDefault(__webpack_require__(944));
-function thrower(r) {
- throw r;
-}
+var _stringify = _interopRequireDefault(__webpack_require__(855));
-var inheritedDataKeys = (function() {
- var excludedPrototypes = [
- Array.prototype,
- Object.prototype,
- Function.prototype
- ];
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
- var isExcludedProto = function(val) {
- for (var i = 0; i < excludedPrototypes.length; ++i) {
- if (excludedPrototypes[i] === val) {
- return true;
- }
- }
- return false;
- };
+// **`v1()` - Generate time-based UUID**
+//
+// Inspired by https://github.com/LiosK/UUID.js
+// and http://docs.python.org/library/uuid.html
+let _nodeId;
- if (es5.isES5) {
- var getKeys = Object.getOwnPropertyNames;
- return function(obj) {
- var ret = [];
- var visitedKeys = Object.create(null);
- while (obj != null && !isExcludedProto(obj)) {
- var keys;
- try {
- keys = getKeys(obj);
- } catch (e) {
- return ret;
- }
- for (var i = 0; i < keys.length; ++i) {
- var key = keys[i];
- if (visitedKeys[key]) continue;
- visitedKeys[key] = true;
- var desc = Object.getOwnPropertyDescriptor(obj, key);
- if (desc != null && desc.get == null && desc.set == null) {
- ret.push(key);
- }
- }
- obj = es5.getPrototypeOf(obj);
- }
- return ret;
- };
- } else {
- var hasProp = {}.hasOwnProperty;
- return function(obj) {
- if (isExcludedProto(obj)) return [];
- var ret = [];
+let _clockseq; // Previous uuid creation time
- /*jshint forin:false */
- enumeration: for (var key in obj) {
- if (hasProp.call(obj, key)) {
- ret.push(key);
- } else {
- for (var i = 0; i < excludedPrototypes.length; ++i) {
- if (hasProp.call(excludedPrototypes[i], key)) {
- continue enumeration;
- }
- }
- ret.push(key);
- }
- }
- return ret;
- };
- }
-})();
+let _lastMSecs = 0;
+let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
-var thisAssignmentPattern = /this\s*\.\s*\S+\s*=/;
-function isClass(fn) {
- try {
- if (typeof fn === "function") {
- var keys = es5.names(fn.prototype);
+function v1(options, buf, offset) {
+ let i = buf && offset || 0;
+ const b = buf || new Array(16);
+ options = options || {};
+ let node = options.node || _nodeId;
+ let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
+ // specified. We do this lazily to minimize issues related to insufficient
+ // system entropy. See #189
- var hasMethods = es5.isES5 && keys.length > 1;
- var hasMethodsOtherThanConstructor = keys.length > 0 &&
- !(keys.length === 1 && keys[0] === "constructor");
- var hasThisAssignmentAndStaticMethods =
- thisAssignmentPattern.test(fn + "") && es5.names(fn).length > 0;
+ if (node == null || clockseq == null) {
+ const seedBytes = options.random || (options.rng || _rng.default)();
- if (hasMethods || hasMethodsOtherThanConstructor ||
- hasThisAssignmentAndStaticMethods) {
- return true;
- }
- }
- return false;
- } catch (e) {
- return false;
+ if (node == null) {
+ // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
+ node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
}
-}
-function toFastProperties(obj) {
- /*jshint -W027,-W055,-W031*/
- function FakeConstructor() {}
- FakeConstructor.prototype = obj;
- var receiver = new FakeConstructor();
- function ic() {
- return typeof receiver.foo;
+ if (clockseq == null) {
+ // Per 4.2.2, randomize (14 bit) clockseq
+ clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
}
- ic();
- ic();
- return obj;
- eval(obj);
-}
+ } // UUID timestamps are 100 nano-second units since the Gregorian epoch,
+ // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
+ // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
+ // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
-var rident = /^[a-z$_][a-z$_0-9]*$/i;
-function isIdentifier(str) {
- return rident.test(str);
-}
-function filledRange(count, prefix, suffix) {
- var ret = new Array(count);
- for(var i = 0; i < count; ++i) {
- ret[i] = prefix + i + suffix;
- }
- return ret;
-}
+ let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
+ // cycle to simulate higher resolution clock
-function safeToString(obj) {
- try {
- return obj + "";
- } catch (e) {
- return "[no string representation]";
- }
-}
+ let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)
-function isError(obj) {
- return obj instanceof Error ||
- (obj !== null &&
- typeof obj === "object" &&
- typeof obj.message === "string" &&
- typeof obj.name === "string");
-}
+ const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression
-function markAsOriginatingFromRejection(e) {
- try {
- notEnumerableProp(e, "isOperational", true);
- }
- catch(ignore) {}
-}
+ if (dt < 0 && options.clockseq === undefined) {
+ clockseq = clockseq + 1 & 0x3fff;
+ } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
+ // time interval
-function originatesFromRejection(e) {
- if (e == null) return false;
- return ((e instanceof Error["__BluebirdErrorTypes__"].OperationalError) ||
- e["isOperational"] === true);
-}
-function canAttachTrace(obj) {
- return isError(obj) && es5.propertyIsWritable(obj, "stack");
-}
+ if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
+ nsecs = 0;
+ } // Per 4.2.1.2 Throw error if too many uuids are requested
-var ensureErrorObject = (function() {
- if (!("stack" in new Error())) {
- return function(value) {
- if (canAttachTrace(value)) return value;
- try {throw new Error(safeToString(value));}
- catch(err) {return err;}
- };
- } else {
- return function(value) {
- if (canAttachTrace(value)) return value;
- return new Error(safeToString(value));
- };
- }
-})();
-function classString(obj) {
- return {}.toString.call(obj);
-}
+ if (nsecs >= 10000) {
+ throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
+ }
-function copyDescriptors(from, to, filter) {
- var keys = es5.names(from);
- for (var i = 0; i < keys.length; ++i) {
- var key = keys[i];
- if (filter(key)) {
- try {
- es5.defineProperty(to, key, es5.getDescriptor(from, key));
- } catch (ignore) {}
- }
- }
-}
+ _lastMSecs = msecs;
+ _lastNSecs = nsecs;
+ _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
-var asArray = function(v) {
- if (es5.isArray(v)) {
- return v;
- }
- return null;
-};
+ msecs += 12219292800000; // `time_low`
-if (typeof Symbol !== "undefined" && Symbol.iterator) {
- var ArrayFrom = typeof Array.from === "function" ? function(v) {
- return Array.from(v);
- } : function(v) {
- var ret = [];
- var it = v[Symbol.iterator]();
- var itResult;
- while (!((itResult = it.next()).done)) {
- ret.push(itResult.value);
- }
- return ret;
- };
+ const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
+ b[i++] = tl >>> 24 & 0xff;
+ b[i++] = tl >>> 16 & 0xff;
+ b[i++] = tl >>> 8 & 0xff;
+ b[i++] = tl & 0xff; // `time_mid`
- asArray = function(v) {
- if (es5.isArray(v)) {
- return v;
- } else if (v != null && typeof v[Symbol.iterator] === "function") {
- return ArrayFrom(v);
- }
- return null;
- };
-}
+ const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
+ b[i++] = tmh >>> 8 & 0xff;
+ b[i++] = tmh & 0xff; // `time_high_and_version`
-var isNode = typeof process !== "undefined" &&
- classString(process).toLowerCase() === "[object process]";
+ b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
-var hasEnvVariables = typeof process !== "undefined" &&
- typeof process.env !== "undefined";
+ b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
-function env(key) {
- return hasEnvVariables ? process.env[key] : undefined;
+ b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`
+
+ b[i++] = clockseq & 0xff; // `node`
+
+ for (let n = 0; n < 6; ++n) {
+ b[i + n] = node[n];
+ }
+
+ return buf || (0, _stringify.default)(b);
}
-function getNativePromise() {
- if (typeof Promise === "function") {
- try {
- var promise = new Promise(function(){});
- if (classString(promise) === "[object Promise]") {
- return Promise;
- }
- } catch (e) {}
+var _default = v1;
+exports.default = _default;
+
+/***/ }),
+/* 174 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const ANY = Symbol('SemVer ANY')
+// hoisted class for cyclic dependency
+class Comparator {
+ static get ANY () {
+ return ANY
+ }
+ constructor (comp, options) {
+ options = parseOptions(options)
+
+ if (comp instanceof Comparator) {
+ if (comp.loose === !!options.loose) {
+ return comp
+ } else {
+ comp = comp.value
+ }
}
-}
-var reflectHandler;
-function contextBind(ctx, cb) {
- if (ctx === null ||
- typeof cb !== "function" ||
- cb === reflectHandler) {
- return cb;
+ debug('comparator', comp, options)
+ this.options = options
+ this.loose = !!options.loose
+ this.parse(comp)
+
+ if (this.semver === ANY) {
+ this.value = ''
+ } else {
+ this.value = this.operator + this.semver.version
}
- if (ctx.domain !== null) {
- cb = ctx.domain.bind(cb);
+ debug('comp', this)
+ }
+
+ parse (comp) {
+ const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
+ const m = comp.match(r)
+
+ if (!m) {
+ throw new TypeError(`Invalid comparator: ${comp}`)
}
- var async = ctx.async;
- if (async !== null) {
- var old = cb;
- cb = function() {
- var $_len = arguments.length + 2;var args = new Array($_len); for(var $_i = 2; $_i < $_len ; ++$_i) {args[$_i] = arguments[$_i - 2];};
- args[0] = old;
- args[1] = this;
- return async.runInAsyncScope.apply(async, args);
- };
+ this.operator = m[1] !== undefined ? m[1] : ''
+ if (this.operator === '=') {
+ this.operator = ''
}
- return cb;
-}
-var ret = {
- setReflectHandler: function(fn) {
- reflectHandler = fn;
- },
- isClass: isClass,
- isIdentifier: isIdentifier,
- inheritedDataKeys: inheritedDataKeys,
- getDataPropertyOrDefault: getDataPropertyOrDefault,
- thrower: thrower,
- isArray: es5.isArray,
- asArray: asArray,
- notEnumerableProp: notEnumerableProp,
- isPrimitive: isPrimitive,
- isObject: isObject,
- isError: isError,
- canEvaluate: canEvaluate,
- errorObj: errorObj,
- tryCatch: tryCatch,
- inherits: inherits,
- withAppended: withAppended,
- maybeWrapAsError: maybeWrapAsError,
- toFastProperties: toFastProperties,
- filledRange: filledRange,
- toString: safeToString,
- canAttachTrace: canAttachTrace,
- ensureErrorObject: ensureErrorObject,
- originatesFromRejection: originatesFromRejection,
- markAsOriginatingFromRejection: markAsOriginatingFromRejection,
- classString: classString,
- copyDescriptors: copyDescriptors,
- isNode: isNode,
- hasEnvVariables: hasEnvVariables,
- env: env,
- global: globalObject,
- getNativePromise: getNativePromise,
- contextBind: contextBind
-};
-ret.isRecentNode = ret.isNode && (function() {
- var version;
- if (process.versions && process.versions.node) {
- version = process.versions.node.split(".").map(Number);
- } else if (process.version) {
- version = process.version.split(".").map(Number);
+ // if it literally is just '>' or '' then allow anything.
+ if (!m[2]) {
+ this.semver = ANY
+ } else {
+ this.semver = new SemVer(m[2], this.options.loose)
}
- return (version[0] === 0 && version[1] > 10) || (version[0] > 0);
-})();
-ret.nodeSupportsAsyncResource = ret.isNode && (function() {
- var supportsAsync = false;
- try {
- var res = __webpack_require__(303).AsyncResource;
- supportsAsync = typeof res.prototype.runInAsyncScope === "function";
- } catch (e) {
- supportsAsync = false;
+ }
+
+ toString () {
+ return this.value
+ }
+
+ test (version) {
+ debug('Comparator.test', version, this.options.loose)
+
+ if (this.semver === ANY || version === ANY) {
+ return true
}
- return supportsAsync;
-})();
-if (ret.isNode) ret.toFastProperties(process);
+ if (typeof version === 'string') {
+ try {
+ version = new SemVer(version, this.options)
+ } catch (er) {
+ return false
+ }
+ }
-try {throw new Error(); } catch (e) {ret.lastLineError = e;}
-module.exports = ret;
+ return cmp(version, this.operator, this.semver, this.options)
+ }
+ intersects (comp, options) {
+ if (!(comp instanceof Comparator)) {
+ throw new TypeError('a Comparator is required')
+ }
-/***/ }),
-/* 249 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ if (!options || typeof options !== 'object') {
+ options = {
+ loose: !!options,
+ includePrerelease: false
+ }
+ }
-"use strict";
+ if (this.operator === '') {
+ if (this.value === '') {
+ return true
+ }
+ return new Range(comp.value, options).test(this.value)
+ } else if (comp.operator === '') {
+ if (comp.value === '') {
+ return true
+ }
+ return new Range(this.value, options).test(comp.semver)
+ }
+
+ const sameDirectionIncreasing =
+ (this.operator === '>=' || this.operator === '>') &&
+ (comp.operator === '>=' || comp.operator === '>')
+ const sameDirectionDecreasing =
+ (this.operator === '<=' || this.operator === '<') &&
+ (comp.operator === '<=' || comp.operator === '<')
+ const sameSemVer = this.semver.version === comp.semver.version
+ const differentDirectionsInclusive =
+ (this.operator === '>=' || this.operator === '<=') &&
+ (comp.operator === '>=' || comp.operator === '<=')
+ const oppositeDirectionsLessThan =
+ cmp(this.semver, '<', comp.semver, options) &&
+ (this.operator === '>=' || this.operator === '>') &&
+ (comp.operator === '<=' || comp.operator === '<')
+ const oppositeDirectionsGreaterThan =
+ cmp(this.semver, '>', comp.semver, options) &&
+ (this.operator === '<=' || this.operator === '<') &&
+ (comp.operator === '>=' || comp.operator === '>')
+ return (
+ sameDirectionIncreasing ||
+ sameDirectionDecreasing ||
+ (sameSemVer && differentDirectionsInclusive) ||
+ oppositeDirectionsLessThan ||
+ oppositeDirectionsGreaterThan
+ )
+ }
+}
-const BB = __webpack_require__(489)
+module.exports = Comparator
-const cacache = __webpack_require__(426)
-const cacheKey = __webpack_require__(279)
-const optCheck = __webpack_require__(420)
-const packlist = __webpack_require__(110)
-const pipe = BB.promisify(__webpack_require__(371).pipe)
-const tar = __webpack_require__(591)
+const parseOptions = __webpack_require__(143)
+const {re, t} = __webpack_require__(328)
+const cmp = __webpack_require__(752)
+const debug = __webpack_require__(548)
+const SemVer = __webpack_require__(206)
+const Range = __webpack_require__(124)
-module.exports = packDir
-function packDir (manifest, label, dir, target, opts) {
- opts = optCheck(opts)
- const packer = opts.dirPacker
- ? BB.resolve(opts.dirPacker(manifest, dir))
- : mkPacker(dir)
+/***/ }),
+/* 175 */
+/***/ (function(module) {
- if (!opts.cache) {
- return packer.then(packer => pipe(packer, target))
- } else {
- const cacher = cacache.put.stream(
- opts.cache, cacheKey('packed-dir', label), opts
- ).on('integrity', i => {
- target.emit('integrity', i)
- })
- return packer.then(packer => BB.all([
- pipe(packer, cacher),
- pipe(packer, target)
- ]))
- }
-}
+"use strict";
-function mkPacker (dir) {
- return packlist({ path: dir }).then(files => {
- return tar.c({
- cwd: dir,
- gzip: true,
- portable: true,
- prefix: 'package/'
- }, files)
- })
+
+module.exports = function spin (spinstr, spun) {
+ return spinstr[spun % spinstr.length]
}
/***/ }),
-/* 250 */
+/* 176 */,
+/* 177 */
/***/ (function(module, __unusedexports, __webpack_require__) {
-var constants = __webpack_require__(619)
+"use strict";
-var origCwd = process.cwd
-var cwd = null
-var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform
+const duck = __webpack_require__(843)
-process.cwd = function() {
- if (!cwd)
- cwd = origCwd.call(process)
- return cwd
+const Fetcher = duck.define(['spec', 'opts', 'manifest'], {
+ packument: ['spec', 'opts'],
+ manifest: ['spec', 'opts'],
+ tarball: ['spec', 'opts'],
+ fromManifest: ['manifest', 'spec', 'opts'],
+ clearMemoized () {}
+}, { name: 'Fetcher' })
+module.exports = Fetcher
+
+module.exports.packument = packument
+function packument (spec, opts) {
+ const fetcher = getFetcher(spec.type)
+ return fetcher.packument(spec, opts)
}
-try {
- process.cwd()
-} catch (er) {}
-var chdir = process.chdir
-process.chdir = function(d) {
- cwd = null
- chdir.call(process, d)
+module.exports.manifest = manifest
+function manifest (spec, opts) {
+ const fetcher = getFetcher(spec.type)
+ return fetcher.manifest(spec, opts)
}
-module.exports = patch
+module.exports.tarball = tarball
+function tarball (spec, opts) {
+ return getFetcher(spec.type).tarball(spec, opts)
+}
-function patch (fs) {
- // (re-)implement some things that are known busted or missing.
+module.exports.fromManifest = fromManifest
+function fromManifest (manifest, spec, opts) {
+ return getFetcher(spec.type).fromManifest(manifest, spec, opts)
+}
- // lchmod, broken prior to 0.6.2
- // back-port the fix here.
- if (constants.hasOwnProperty('O_SYMLINK') &&
- process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
- patchLchmod(fs)
- }
+const fetchers = {}
- // lutimes implementation, or no-op
- if (!fs.lutimes) {
- patchLutimes(fs)
+module.exports.clearMemoized = clearMemoized
+function clearMemoized () {
+ Object.keys(fetchers).forEach(k => {
+ fetchers[k].clearMemoized()
+ })
+}
+
+function getFetcher (type) {
+ if (!fetchers[type]) {
+ // This is spelled out both to prevent sketchy stuff and to make life
+ // easier for bundlers/preprocessors.
+ switch (type) {
+ case 'alias':
+ fetchers[type] = __webpack_require__(457)
+ break
+ case 'directory':
+ fetchers[type] = __webpack_require__(879)
+ break
+ case 'file':
+ fetchers[type] = __webpack_require__(212)
+ break
+ case 'git':
+ fetchers[type] = __webpack_require__(532)
+ break
+ case 'hosted':
+ fetchers[type] = __webpack_require__(804)
+ break
+ case 'range':
+ fetchers[type] = __webpack_require__(987)
+ break
+ case 'remote':
+ fetchers[type] = __webpack_require__(507)
+ break
+ case 'tag':
+ fetchers[type] = __webpack_require__(578)
+ break
+ case 'version':
+ fetchers[type] = __webpack_require__(438)
+ break
+ default:
+ throw new Error(`Invalid dependency type requested: ${type}`)
+ }
}
+ return fetchers[type]
+}
- // https://github.com/isaacs/node-graceful-fs/issues/4
- // Chown should not fail on einval or eperm if non-root.
- // It should not fail on enosys ever, as this just indicates
- // that a fs doesn't support the intended operation.
- fs.chown = chownFix(fs.chown)
- fs.fchown = chownFix(fs.fchown)
- fs.lchown = chownFix(fs.lchown)
+/***/ }),
+/* 178 */
+/***/ (function(module) {
- fs.chmod = chmodFix(fs.chmod)
- fs.fchmod = chmodFix(fs.fchmod)
- fs.lchmod = chmodFix(fs.lchmod)
+module.exports = ["389-exception","Autoconf-exception-2.0","Autoconf-exception-3.0","Bison-exception-2.2","Bootloader-exception","Classpath-exception-2.0","CLISP-exception-2.0","DigiRule-FOSS-exception","eCos-exception-2.0","Fawkes-Runtime-exception","FLTK-exception","Font-exception-2.0","freertos-exception-2.0","GCC-exception-2.0","GCC-exception-3.1","gnu-javamail-exception","GPL-3.0-linking-exception","GPL-3.0-linking-source-exception","GPL-CC-1.0","i2p-gpl-java-exception","Libtool-exception","Linux-syscall-note","LLVM-exception","LZMA-exception","mif-exception","Nokia-Qt-exception-1.1","OCaml-LGPL-linking-exception","OCCT-exception-1.0","OpenJDK-assembly-exception-1.0","openvpn-openssl-exception","PS-or-PDF-font-exception-20170817","Qt-GPL-exception-1.0","Qt-LGPL-exception-1.1","Qwt-exception-1.0","Swift-exception","u-boot-exception-2.0","Universal-FOSS-exception-1.0","WxWindows-exception-3.1"];
- fs.chownSync = chownFixSync(fs.chownSync)
- fs.fchownSync = chownFixSync(fs.fchownSync)
- fs.lchownSync = chownFixSync(fs.lchownSync)
+/***/ }),
+/* 179 */,
+/* 180 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
- fs.chmodSync = chmodFixSync(fs.chmodSync)
- fs.fchmodSync = chmodFixSync(fs.fchmodSync)
- fs.lchmodSync = chmodFixSync(fs.lchmodSync)
+"use strict";
- fs.stat = statFix(fs.stat)
- fs.fstat = statFix(fs.fstat)
- fs.lstat = statFix(fs.lstat)
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.run = void 0;
+const core_1 = __webpack_require__(470);
+const install_1 = __webpack_require__(655);
+const tools_1 = __webpack_require__(534);
+function run() {
+ return __awaiter(this, void 0, void 0, function* () {
+ const config = {
+ version: core_1.getInput('expo-version') || 'latest',
+ packager: core_1.getInput('expo-packager') || 'yarn',
+ cache: (core_1.getInput('expo-cache') || 'false') === 'true',
+ cacheKey: core_1.getInput('expo-cache-key') || undefined,
+ };
+ // Resolve the exact requested Expo CLI version
+ config.version = yield tools_1.resolveVersion(config.version);
+ const path = yield core_1.group(config.cache
+ ? `Installing Expo CLI (${config.version}) from cache or with ${config.packager}`
+ : `Installing Expo CLI (${config.version}) with ${config.packager}`, () => install_1.install(config));
+ core_1.addPath(path);
+ yield core_1.group('Checking current authenticated account', () => tools_1.maybeAuthenticate({
+ token: core_1.getInput('expo-token') || undefined,
+ username: core_1.getInput('expo-username') || undefined,
+ password: core_1.getInput('expo-password') || undefined,
+ }));
+ const shouldPatchWatchers = core_1.getInput('expo-patch-watchers') || 'true';
+ if (shouldPatchWatchers !== 'false') {
+ yield core_1.group('Patching system watchers for the `ENOSPC` error', () => tools_1.maybePatchWatchers());
+ }
+ });
+}
+exports.run = run;
- fs.statSync = statFixSync(fs.statSync)
- fs.fstatSync = statFixSync(fs.fstatSync)
- fs.lstatSync = statFixSync(fs.lstatSync)
- // if lchmod/lchown do not exist, then make them no-ops
- if (!fs.lchmod) {
- fs.lchmod = function (path, mode, cb) {
- if (cb) process.nextTick(cb)
- }
- fs.lchmodSync = function () {}
- }
- if (!fs.lchown) {
- fs.lchown = function (path, uid, gid, cb) {
- if (cb) process.nextTick(cb)
- }
- fs.lchownSync = function () {}
- }
+/***/ }),
+/* 181 */
+/***/ (function(module) {
- // on Windows, A/V software can lock the directory, causing this
- // to fail with an EACCES or EPERM if the directory contains newly
- // created files. Try again on failure, for up to 60 seconds.
+// Note: this is the semver.org version of the spec that it implements
+// Not necessarily the package version of this code.
+const SEMVER_SPEC_VERSION = '2.0.0'
- // Set the timeout this long because some Windows Anti-Virus, such as Parity
- // bit9, may lock files for up to a minute, causing npm package install
- // failures. Also, take care to yield the scheduler. Windows scheduling gives
- // CPU to a busy looping process, which can cause the program causing the lock
- // contention to be starved of CPU by node, so the contention doesn't resolve.
- if (platform === "win32") {
- fs.rename = (function (fs$rename) { return function (from, to, cb) {
- var start = Date.now()
- var backoff = 0;
- fs$rename(from, to, function CB (er) {
- if (er
- && (er.code === "EACCES" || er.code === "EPERM")
- && Date.now() - start < 60000) {
- setTimeout(function() {
- fs.stat(to, function (stater, st) {
- if (stater && stater.code === "ENOENT")
- fs$rename(from, to, CB);
- else
- cb(er)
- })
- }, backoff)
- if (backoff < 100)
- backoff += 10;
- return;
- }
- if (cb) cb(er)
- })
- }})(fs.rename)
- }
+const MAX_LENGTH = 256
+const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
+ /* istanbul ignore next */ 9007199254740991
- // if read() returns EAGAIN, then just try it again.
- fs.read = (function (fs$read) {
- function read (fd, buffer, offset, length, position, callback_) {
- var callback
- if (callback_ && typeof callback_ === 'function') {
- var eagCounter = 0
- callback = function (er, _, __) {
- if (er && er.code === 'EAGAIN' && eagCounter < 10) {
- eagCounter ++
- return fs$read.call(fs, fd, buffer, offset, length, position, callback)
- }
- callback_.apply(this, arguments)
- }
- }
- return fs$read.call(fs, fd, buffer, offset, length, position, callback)
- }
+// Max safe segment length for coercion.
+const MAX_SAFE_COMPONENT_LENGTH = 16
- // This ensures `util.promisify` works as it does for native `fs.read`.
- read.__proto__ = fs$read
- return read
- })(fs.read)
+module.exports = {
+ SEMVER_SPEC_VERSION,
+ MAX_LENGTH,
+ MAX_SAFE_INTEGER,
+ MAX_SAFE_COMPONENT_LENGTH
+}
- fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) {
- var eagCounter = 0
- while (true) {
- try {
- return fs$readSync.call(fs, fd, buffer, offset, length, position)
- } catch (er) {
- if (er.code === 'EAGAIN' && eagCounter < 10) {
- eagCounter ++
- continue
- }
- throw er
- }
- }
- }})(fs.readSync)
- function patchLchmod (fs) {
- fs.lchmod = function (path, mode, callback) {
- fs.open( path
- , constants.O_WRONLY | constants.O_SYMLINK
- , mode
- , function (err, fd) {
- if (err) {
- if (callback) callback(err)
- return
- }
- // prefer to return the chmod error, if one occurs,
- // but still try to close, and report closing errors if they occur.
- fs.fchmod(fd, mode, function (err) {
- fs.close(fd, function(err2) {
- if (callback) callback(err || err2)
- })
- })
- })
- }
+/***/ }),
+/* 182 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- fs.lchmodSync = function (path, mode) {
- var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
+"use strict";
- // prefer to return the chmod error, if one occurs,
- // but still try to close, and report closing errors if they occur.
- var threw = true
- var ret
- try {
- ret = fs.fchmodSync(fd, mode)
- threw = false
- } finally {
- if (threw) {
- try {
- fs.closeSync(fd)
- } catch (er) {}
- } else {
- fs.closeSync(fd)
- }
- }
- return ret
- }
- }
+module.exports = writeFile
+module.exports.sync = writeFileSync
+module.exports._getTmpname = getTmpname // for testing
+module.exports._cleanupOnExit = cleanupOnExit
- function patchLutimes (fs) {
- if (constants.hasOwnProperty("O_SYMLINK")) {
- fs.lutimes = function (path, at, mt, cb) {
- fs.open(path, constants.O_SYMLINK, function (er, fd) {
- if (er) {
- if (cb) cb(er)
- return
- }
- fs.futimes(fd, at, mt, function (er) {
- fs.close(fd, function (er2) {
- if (cb) cb(er || er2)
- })
- })
- })
- }
+var fs = __webpack_require__(598)
+var MurmurHash3 = __webpack_require__(188)
+var onExit = __webpack_require__(497)
+var path = __webpack_require__(622)
+var activeFiles = {}
- fs.lutimesSync = function (path, at, mt) {
- var fd = fs.openSync(path, constants.O_SYMLINK)
- var ret
- var threw = true
- try {
- ret = fs.futimesSync(fd, at, mt)
- threw = false
- } finally {
- if (threw) {
- try {
- fs.closeSync(fd)
- } catch (er) {}
- } else {
- fs.closeSync(fd)
- }
- }
- return ret
- }
+// if we run inside of a worker_thread, `process.pid` is not unique
+/* istanbul ignore next */
+var threadId = (function getId () {
+ try {
+ var workerThreads = __webpack_require__(13)
- } else {
- fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }
- fs.lutimesSync = function () {}
- }
+ /// if we are in main thread, this is set to `0`
+ return workerThreads.threadId
+ } catch (e) {
+ // worker_threads are not available, fallback to 0
+ return 0
}
+})()
- function chmodFix (orig) {
- if (!orig) return orig
- return function (target, mode, cb) {
- return orig.call(fs, target, mode, function (er) {
- if (chownErOk(er)) er = null
- if (cb) cb.apply(this, arguments)
- })
- }
+var invocations = 0
+function getTmpname (filename) {
+ return filename + '.' +
+ MurmurHash3(__filename)
+ .hash(String(process.pid))
+ .hash(String(threadId))
+ .hash(String(++invocations))
+ .result()
+}
+
+function cleanupOnExit (tmpfile) {
+ return function () {
+ try {
+ fs.unlinkSync(typeof tmpfile === 'function' ? tmpfile() : tmpfile)
+ } catch (_) {}
}
+}
- function chmodFixSync (orig) {
- if (!orig) return orig
- return function (target, mode) {
- try {
- return orig.call(fs, target, mode)
- } catch (er) {
- if (!chownErOk(er)) throw er
- }
+function writeFile (filename, data, options, callback) {
+ if (options) {
+ if (options instanceof Function) {
+ callback = options
+ options = {}
+ } else if (typeof options === 'string') {
+ options = { encoding: options }
}
+ } else {
+ options = {}
}
+ var Promise = options.Promise || global.Promise
+ var truename
+ var fd
+ var tmpfile
+ /* istanbul ignore next -- The closure only gets called when onExit triggers */
+ var removeOnExitHandler = onExit(cleanupOnExit(() => tmpfile))
+ var absoluteName = path.resolve(filename)
+
+ new Promise(function serializeSameFile (resolve) {
+ // make a queue if it doesn't already exist
+ if (!activeFiles[absoluteName]) activeFiles[absoluteName] = []
- function chownFix (orig) {
- if (!orig) return orig
- return function (target, uid, gid, cb) {
- return orig.call(fs, target, uid, gid, function (er) {
- if (chownErOk(er)) er = null
- if (cb) cb.apply(this, arguments)
+ activeFiles[absoluteName].push(resolve) // add this job to the queue
+ if (activeFiles[absoluteName].length === 1) resolve() // kick off the first one
+ }).then(function getRealPath () {
+ return new Promise(function (resolve) {
+ fs.realpath(filename, function (_, realname) {
+ truename = realname || filename
+ tmpfile = getTmpname(truename)
+ resolve()
+ })
+ })
+ }).then(function stat () {
+ return new Promise(function stat (resolve) {
+ if (options.mode && options.chown) resolve()
+ else {
+ // Either mode or chown is not explicitly set
+ // Default behavior is to copy it from original file
+ fs.stat(truename, function (err, stats) {
+ if (err || !stats) resolve()
+ else {
+ options = Object.assign({}, options)
+
+ if (options.mode == null) {
+ options.mode = stats.mode
+ }
+ if (options.chown == null && process.getuid) {
+ options.chown = { uid: stats.uid, gid: stats.gid }
+ }
+ resolve()
+ }
+ })
+ }
+ })
+ }).then(function thenWriteFile () {
+ return new Promise(function (resolve, reject) {
+ fs.open(tmpfile, 'w', options.mode, function (err, _fd) {
+ fd = _fd
+ if (err) reject(err)
+ else resolve()
+ })
+ })
+ }).then(function write () {
+ return new Promise(function (resolve, reject) {
+ if (Buffer.isBuffer(data)) {
+ fs.write(fd, data, 0, data.length, 0, function (err) {
+ if (err) reject(err)
+ else resolve()
+ })
+ } else if (data != null) {
+ fs.write(fd, String(data), 0, String(options.encoding || 'utf8'), function (err) {
+ if (err) reject(err)
+ else resolve()
+ })
+ } else resolve()
+ })
+ }).then(function syncAndClose () {
+ return new Promise(function (resolve, reject) {
+ if (options.fsync !== false) {
+ fs.fsync(fd, function (err) {
+ if (err) fs.close(fd, () => reject(err))
+ else fs.close(fd, resolve)
+ })
+ } else {
+ fs.close(fd, resolve)
+ }
+ })
+ }).then(function chown () {
+ fd = null
+ if (options.chown) {
+ return new Promise(function (resolve, reject) {
+ fs.chown(tmpfile, options.chown.uid, options.chown.gid, function (err) {
+ if (err) reject(err)
+ else resolve()
+ })
+ })
+ }
+ }).then(function chmod () {
+ if (options.mode) {
+ return new Promise(function (resolve, reject) {
+ fs.chmod(tmpfile, options.mode, function (err) {
+ if (err) reject(err)
+ else resolve()
+ })
})
}
+ }).then(function rename () {
+ return new Promise(function (resolve, reject) {
+ fs.rename(tmpfile, truename, function (err) {
+ if (err) reject(err)
+ else resolve()
+ })
+ })
+ }).then(function success () {
+ removeOnExitHandler()
+ callback()
+ }, function fail (err) {
+ return new Promise(resolve => {
+ return fd ? fs.close(fd, resolve) : resolve()
+ }).then(() => {
+ removeOnExitHandler()
+ fs.unlink(tmpfile, function () {
+ callback(err)
+ })
+ })
+ }).then(function checkQueue () {
+ activeFiles[absoluteName].shift() // remove the element added by serializeSameFile
+ if (activeFiles[absoluteName].length > 0) {
+ activeFiles[absoluteName][0]() // start next job if one is pending
+ } else delete activeFiles[absoluteName]
+ })
+}
+
+function writeFileSync (filename, data, options) {
+ if (typeof options === 'string') options = { encoding: options }
+ else if (!options) options = {}
+ try {
+ filename = fs.realpathSync(filename)
+ } catch (ex) {
+ // it's ok, it'll happen on a not yet existing file
}
+ var tmpfile = getTmpname(filename)
- function chownFixSync (orig) {
- if (!orig) return orig
- return function (target, uid, gid) {
- try {
- return orig.call(fs, target, uid, gid)
- } catch (er) {
- if (!chownErOk(er)) throw er
+ if (!options.mode || !options.chown) {
+ // Either mode or chown is not explicitly set
+ // Default behavior is to copy it from original file
+ try {
+ var stats = fs.statSync(filename)
+ options = Object.assign({}, options)
+ if (!options.mode) {
+ options.mode = stats.mode
+ }
+ if (!options.chown && process.getuid) {
+ options.chown = { uid: stats.uid, gid: stats.gid }
}
+ } catch (ex) {
+ // ignore stat errors
}
}
- function statFix (orig) {
- if (!orig) return orig
- // Older versions of Node erroneously returned signed integers for
- // uid + gid.
- return function (target, options, cb) {
- if (typeof options === 'function') {
- cb = options
- options = null
- }
- function callback (er, stats) {
- if (stats) {
- if (stats.uid < 0) stats.uid += 0x100000000
- if (stats.gid < 0) stats.gid += 0x100000000
- }
- if (cb) cb.apply(this, arguments)
+ var fd
+ var cleanup = cleanupOnExit(tmpfile)
+ var removeOnExitHandler = onExit(cleanup)
+
+ try {
+ fd = fs.openSync(tmpfile, 'w', options.mode)
+ if (Buffer.isBuffer(data)) {
+ fs.writeSync(fd, data, 0, data.length, 0)
+ } else if (data != null) {
+ fs.writeSync(fd, String(data), 0, String(options.encoding || 'utf8'))
+ }
+ if (options.fsync !== false) {
+ fs.fsyncSync(fd)
+ }
+ fs.closeSync(fd)
+ if (options.chown) fs.chownSync(tmpfile, options.chown.uid, options.chown.gid)
+ if (options.mode) fs.chmodSync(tmpfile, options.mode)
+ fs.renameSync(tmpfile, filename)
+ removeOnExitHandler()
+ } catch (err) {
+ if (fd) {
+ try {
+ fs.closeSync(fd)
+ } catch (ex) {
+ // ignore close errors at this stage, error may have closed fd already.
}
- return options ? orig.call(fs, target, options, callback)
- : orig.call(fs, target, callback)
}
+ removeOnExitHandler()
+ cleanup()
+ throw err
}
+}
- function statFixSync (orig) {
- if (!orig) return orig
- // Older versions of Node erroneously returned signed integers for
- // uid + gid.
- return function (target, options) {
- var stats = options ? orig.call(fs, target, options)
- : orig.call(fs, target)
- if (stats.uid < 0) stats.uid += 0x100000000
- if (stats.gid < 0) stats.gid += 0x100000000
- return stats;
- }
- }
- // ENOSYS means that the fs doesn't support the op. Just ignore
- // that, because it doesn't matter.
- //
- // if there's no getuid, or if getuid() is something other
- // than 0, and the error is EINVAL or EPERM, then just ignore
- // it.
- //
- // This specific case is a silent failure in cp, install, tar,
- // and most other unix tools that manage permissions.
- //
- // When running as root, or if other types of errors are
- // encountered, then it's strict.
- function chownErOk (er) {
- if (!er)
- return true
+/***/ }),
+/* 183 */
+/***/ (function(module) {
- if (er.code === "ENOSYS")
- return true
+function RetryOperation(timeouts, options) {
+ // Compatibility for the old (timeouts, retryForever) signature
+ if (typeof options === 'boolean') {
+ options = { forever: options };
+ }
- var nonroot = !process.getuid || process.getuid() !== 0
- if (nonroot) {
- if (er.code === "EINVAL" || er.code === "EPERM")
- return true
- }
+ this._timeouts = timeouts;
+ this._options = options || {};
+ this._fn = null;
+ this._errors = [];
+ this._attempts = 1;
+ this._operationTimeout = null;
+ this._operationTimeoutCb = null;
+ this._timeout = null;
- return false
+ if (this._options.forever) {
+ this._cachedTimeouts = this._timeouts.slice(0);
}
}
+module.exports = RetryOperation;
+RetryOperation.prototype.stop = function() {
+ if (this._timeout) {
+ clearTimeout(this._timeout);
+ }
-/***/ }),
-/* 251 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
- result["default"] = mod;
- return result;
+ this._timeouts = [];
+ this._cachedTimeouts = null;
};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const http_client_1 = __webpack_require__(22);
-const storage_blob_1 = __webpack_require__(373);
-const buffer = __importStar(__webpack_require__(293));
-const fs = __importStar(__webpack_require__(747));
-const stream = __importStar(__webpack_require__(794));
-const util = __importStar(__webpack_require__(669));
-const utils = __importStar(__webpack_require__(15));
-const constants_1 = __webpack_require__(931);
-const requestUtils_1 = __webpack_require__(899);
-/**
- * Pipes the body of a HTTP response to a stream
- *
- * @param response the HTTP response
- * @param output the writable stream
- */
-function pipeResponseToStream(response, output) {
- return __awaiter(this, void 0, void 0, function* () {
- const pipeline = util.promisify(stream.pipeline);
- yield pipeline(response.message, output);
- });
-}
-/**
- * Class for tracking the download state and displaying stats.
- */
-class DownloadProgress {
- constructor(contentLength) {
- this.contentLength = contentLength;
- this.segmentIndex = 0;
- this.segmentSize = 0;
- this.segmentOffset = 0;
- this.receivedBytes = 0;
- this.displayedComplete = false;
- this.startTime = Date.now();
- }
- /**
- * Progress to the next segment. Only call this method when the previous segment
- * is complete.
- *
- * @param segmentSize the length of the next segment
- */
- nextSegment(segmentSize) {
- this.segmentOffset = this.segmentOffset + this.segmentSize;
- this.segmentIndex = this.segmentIndex + 1;
- this.segmentSize = segmentSize;
- this.receivedBytes = 0;
- core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`);
- }
- /**
- * Sets the number of bytes received for the current segment.
- *
- * @param receivedBytes the number of bytes received
- */
- setReceivedBytes(receivedBytes) {
- this.receivedBytes = receivedBytes;
- }
- /**
- * Returns the total number of bytes transferred.
- */
- getTransferredBytes() {
- return this.segmentOffset + this.receivedBytes;
- }
- /**
- * Returns true if the download is complete.
- */
- isDone() {
- return this.getTransferredBytes() === this.contentLength;
- }
- /**
- * Prints the current download stats. Once the download completes, this will print one
- * last line and then stop.
- */
- display() {
- if (this.displayedComplete) {
- return;
- }
- const transferredBytes = this.segmentOffset + this.receivedBytes;
- const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
- const elapsedTime = Date.now() - this.startTime;
- const downloadSpeed = (transferredBytes /
- (1024 * 1024) /
- (elapsedTime / 1000)).toFixed(1);
- core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`);
- if (this.isDone()) {
- this.displayedComplete = true;
- }
- }
- /**
- * Returns a function used to handle TransferProgressEvents.
- */
- onProgress() {
- return (progress) => {
- this.setReceivedBytes(progress.loadedBytes);
- };
- }
- /**
- * Starts the timer that displays the stats.
- *
- * @param delayInMs the delay between each write
- */
- startDisplayTimer(delayInMs = 1000) {
- const displayCallback = () => {
- this.display();
- if (!this.isDone()) {
- this.timeoutHandle = setTimeout(displayCallback, delayInMs);
- }
- };
- this.timeoutHandle = setTimeout(displayCallback, delayInMs);
- }
- /**
- * Stops the timer that displays the stats. As this typically indicates the download
- * is complete, this will display one last line, unless the last line has already
- * been written.
- */
- stopDisplayTimer() {
- if (this.timeoutHandle) {
- clearTimeout(this.timeoutHandle);
- this.timeoutHandle = undefined;
- }
- this.display();
- }
-}
-exports.DownloadProgress = DownloadProgress;
-/**
- * Download the cache using the Actions toolkit http-client
- *
- * @param archiveLocation the URL for the cache
- * @param archivePath the local path where the cache is saved
- */
-function downloadCacheHttpClient(archiveLocation, archivePath) {
- return __awaiter(this, void 0, void 0, function* () {
- const writeStream = fs.createWriteStream(archivePath);
- const httpClient = new http_client_1.HttpClient('actions/cache');
- const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));
- // Abort download if no traffic received over the socket.
- downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
- downloadResponse.message.destroy();
- core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
- });
- yield pipeResponseToStream(downloadResponse, writeStream);
- // Validate download size.
- const contentLengthHeader = downloadResponse.message.headers['content-length'];
- if (contentLengthHeader) {
- const expectedLength = parseInt(contentLengthHeader);
- const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
- if (actualLength !== expectedLength) {
- throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
- }
- }
- else {
- core.debug('Unable to validate download, no Content-Length header');
- }
- });
-}
-exports.downloadCacheHttpClient = downloadCacheHttpClient;
-/**
- * Download the cache using the Azure Storage SDK. Only call this method if the
- * URL points to an Azure Storage endpoint.
- *
- * @param archiveLocation the URL for the cache
- * @param archivePath the local path where the cache is saved
- * @param options the download options with the defaults set
- */
-function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
- var _a;
- return __awaiter(this, void 0, void 0, function* () {
- const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, {
- retryOptions: {
- // Override the timeout used when downloading each 4 MB chunk
- // The default is 2 min / MB, which is way too slow
- tryTimeoutInMs: options.timeoutInMs
- }
- });
- const properties = yield client.getProperties();
- const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1;
- if (contentLength < 0) {
- // We should never hit this condition, but just in case fall back to downloading the
- // file as one large stream
- core.debug('Unable to determine content length, downloading file with http-client...');
- yield downloadCacheHttpClient(archiveLocation, archivePath);
- }
- else {
- // Use downloadToBuffer for faster downloads, since internally it splits the
- // file into 4 MB chunks which can then be parallelized and retried independently
- //
- // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB
- // on 64-bit systems), split the download into multiple segments
- const maxSegmentSize = buffer.constants.MAX_LENGTH;
- const downloadProgress = new DownloadProgress(contentLength);
- const fd = fs.openSync(archivePath, 'w');
- try {
- downloadProgress.startDisplayTimer();
- while (!downloadProgress.isDone()) {
- const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
- const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
- downloadProgress.nextSegment(segmentSize);
- const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
- concurrency: options.downloadConcurrency,
- onProgress: downloadProgress.onProgress()
- });
- fs.writeFileSync(fd, result);
- }
- }
- finally {
- downloadProgress.stopDisplayTimer();
- fs.closeSync(fd);
- }
- }
- });
-}
-exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
-//# sourceMappingURL=downloadUtils.js.map
-
-/***/ }),
-/* 252 */,
-/* 253 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-"use strict";
+RetryOperation.prototype.retry = function(err) {
+ if (this._timeout) {
+ clearTimeout(this._timeout);
+ }
-module.exports = function(NEXT_FILTER) {
-var util = __webpack_require__(248);
-var getKeys = __webpack_require__(883).keys;
-var tryCatch = util.tryCatch;
-var errorObj = util.errorObj;
+ if (!err) {
+ return false;
+ }
-function catchFilter(instances, cb, promise) {
- return function(e) {
- var boundTo = promise._boundValue();
- predicateLoop: for (var i = 0; i < instances.length; ++i) {
- var item = instances[i];
+ this._errors.push(err);
- if (item === Error ||
- (item != null && item.prototype instanceof Error)) {
- if (e instanceof item) {
- return tryCatch(cb).call(boundTo, e);
- }
- } else if (typeof item === "function") {
- var matchesPredicate = tryCatch(item).call(boundTo, e);
- if (matchesPredicate === errorObj) {
- return matchesPredicate;
- } else if (matchesPredicate) {
- return tryCatch(cb).call(boundTo, e);
- }
- } else if (util.isObject(e)) {
- var keys = getKeys(item);
- for (var j = 0; j < keys.length; ++j) {
- var key = keys[j];
- if (item[key] != e[key]) {
- continue predicateLoop;
- }
- }
- return tryCatch(cb).call(boundTo, e);
- }
- }
- return NEXT_FILTER;
- };
-}
+ var timeout = this._timeouts.shift();
+ if (timeout === undefined) {
+ if (this._cachedTimeouts) {
+ // retry forever, only keep last error
+ this._errors.splice(this._errors.length - 1, this._errors.length);
+ this._timeouts = this._cachedTimeouts.slice(0);
+ timeout = this._timeouts.shift();
+ } else {
+ return false;
+ }
+ }
-return catchFilter;
-};
+ var self = this;
+ var timer = setTimeout(function() {
+ self._attempts++;
+ if (self._operationTimeoutCb) {
+ self._timeout = setTimeout(function() {
+ self._operationTimeoutCb(self._attempts);
+ }, self._operationTimeout);
-/***/ }),
-/* 254 */
-/***/ (function(module, exports, __webpack_require__) {
+ if (this._options.unref) {
+ self._timeout.unref();
+ }
+ }
-/* eslint-disable node/no-deprecated-api */
-var buffer = __webpack_require__(293)
-var Buffer = buffer.Buffer
+ self._fn(self._attempts);
+ }, timeout);
-// alternative to using Object.keys for old browsers
-function copyProps (src, dst) {
- for (var key in src) {
- dst[key] = src[key]
+ if (this._options.unref) {
+ timer.unref();
}
-}
-if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
- module.exports = buffer
-} else {
- // Copy properties from require('buffer')
- copyProps(buffer, exports)
- exports.Buffer = SafeBuffer
-}
-
-function SafeBuffer (arg, encodingOrOffset, length) {
- return Buffer(arg, encodingOrOffset, length)
-}
-// Copy static methods from Buffer
-copyProps(Buffer, SafeBuffer)
+ return true;
+};
-SafeBuffer.from = function (arg, encodingOrOffset, length) {
- if (typeof arg === 'number') {
- throw new TypeError('Argument must not be a number')
- }
- return Buffer(arg, encodingOrOffset, length)
-}
+RetryOperation.prototype.attempt = function(fn, timeoutOps) {
+ this._fn = fn;
-SafeBuffer.alloc = function (size, fill, encoding) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
- }
- var buf = Buffer(size)
- if (fill !== undefined) {
- if (typeof encoding === 'string') {
- buf.fill(fill, encoding)
- } else {
- buf.fill(fill)
+ if (timeoutOps) {
+ if (timeoutOps.timeout) {
+ this._operationTimeout = timeoutOps.timeout;
+ }
+ if (timeoutOps.cb) {
+ this._operationTimeoutCb = timeoutOps.cb;
}
- } else {
- buf.fill(0)
- }
- return buf
-}
-
-SafeBuffer.allocUnsafe = function (size) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
}
- return Buffer(size)
-}
-SafeBuffer.allocUnsafeSlow = function (size) {
- if (typeof size !== 'number') {
- throw new TypeError('Argument must be a number')
+ var self = this;
+ if (this._operationTimeoutCb) {
+ this._timeout = setTimeout(function() {
+ self._operationTimeoutCb();
+ }, self._operationTimeout);
}
- return buffer.SlowBuffer(size)
-}
-
-/***/ }),
-/* 255 */,
-/* 256 */,
-/* 257 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ this._fn(this._attempts);
+};
-// Generated by CoffeeScript 1.12.7
-(function() {
- var DocumentPosition, NodeType, XMLCData, XMLComment, XMLDeclaration, XMLDocType, XMLDummy, XMLElement, XMLNamedNodeMap, XMLNode, XMLNodeList, XMLProcessingInstruction, XMLRaw, XMLText, getValue, isEmpty, isFunction, isObject, ref1,
- hasProp = {}.hasOwnProperty;
+RetryOperation.prototype.try = function(fn) {
+ console.log('Using RetryOperation.try() is deprecated');
+ this.attempt(fn);
+};
- ref1 = __webpack_require__(582), isObject = ref1.isObject, isFunction = ref1.isFunction, isEmpty = ref1.isEmpty, getValue = ref1.getValue;
+RetryOperation.prototype.start = function(fn) {
+ console.log('Using RetryOperation.start() is deprecated');
+ this.attempt(fn);
+};
- XMLElement = null;
+RetryOperation.prototype.start = RetryOperation.prototype.try;
- XMLCData = null;
+RetryOperation.prototype.errors = function() {
+ return this._errors;
+};
- XMLComment = null;
+RetryOperation.prototype.attempts = function() {
+ return this._attempts;
+};
- XMLDeclaration = null;
+RetryOperation.prototype.mainError = function() {
+ if (this._errors.length === 0) {
+ return null;
+ }
- XMLDocType = null;
+ var counts = {};
+ var mainError = null;
+ var mainErrorCount = 0;
- XMLRaw = null;
+ for (var i = 0; i < this._errors.length; i++) {
+ var error = this._errors[i];
+ var message = error.message;
+ var count = (counts[message] || 0) + 1;
- XMLText = null;
+ counts[message] = count;
- XMLProcessingInstruction = null;
+ if (count >= mainErrorCount) {
+ mainError = error;
+ mainErrorCount = count;
+ }
+ }
- XMLDummy = null;
+ return mainError;
+};
- NodeType = null;
- XMLNodeList = null;
+/***/ }),
+/* 184 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- XMLNamedNodeMap = null;
+"use strict";
- DocumentPosition = null;
+module.exports = move
- module.exports = XMLNode = (function() {
- function XMLNode(parent1) {
- this.parent = parent1;
- if (this.parent) {
- this.options = this.parent.options;
- this.stringify = this.parent.stringify;
- }
- this.value = null;
- this.children = [];
- this.baseURI = null;
- if (!XMLElement) {
- XMLElement = __webpack_require__(796);
- XMLCData = __webpack_require__(657);
- XMLComment = __webpack_require__(919);
- XMLDeclaration = __webpack_require__(738);
- XMLDocType = __webpack_require__(735);
- XMLRaw = __webpack_require__(660);
- XMLText = __webpack_require__(708);
- XMLProcessingInstruction = __webpack_require__(491);
- XMLDummy = __webpack_require__(956);
- NodeType = __webpack_require__(683);
- XMLNodeList = __webpack_require__(300);
- XMLNamedNodeMap = __webpack_require__(451);
- DocumentPosition = __webpack_require__(65);
- }
- }
+var nodeFs = __webpack_require__(747)
+var rimraf = __webpack_require__(993)
+var validate = __webpack_require__(997)
+var copy = __webpack_require__(555)
+var RunQueue = __webpack_require__(34)
+var extend = Object.assign || __webpack_require__(669)._extend
- Object.defineProperty(XMLNode.prototype, 'nodeName', {
- get: function() {
- return this.name;
- }
- });
+function promisify (Promise, fn) {
+ return function () {
+ var args = [].slice.call(arguments)
+ return new Promise(function (resolve, reject) {
+ return fn.apply(null, args.concat(function (err, value) {
+ if (err) {
+ reject(err)
+ } else {
+ resolve(value)
+ }
+ }))
+ })
+ }
+}
- Object.defineProperty(XMLNode.prototype, 'nodeType', {
- get: function() {
- return this.type;
- }
- });
+function move (from, to, opts) {
+ validate('SSO|SS', arguments)
+ opts = extend({}, opts || {})
- Object.defineProperty(XMLNode.prototype, 'nodeValue', {
- get: function() {
- return this.value;
- }
- });
+ var Promise = opts.Promise || global.Promise
+ var fs = opts.fs || nodeFs
+ var rimrafAsync = promisify(Promise, rimraf)
+ var renameAsync = promisify(Promise, fs.rename)
- Object.defineProperty(XMLNode.prototype, 'parentNode', {
- get: function() {
- return this.parent;
- }
- });
+ opts.top = from
- Object.defineProperty(XMLNode.prototype, 'childNodes', {
- get: function() {
- if (!this.childNodeList || !this.childNodeList.nodes) {
- this.childNodeList = new XMLNodeList(this.children);
- }
- return this.childNodeList;
- }
- });
+ var queue = new RunQueue({
+ maxConcurrency: opts.maxConcurrency,
+ Promise: Promise
+ })
+ opts.queue = queue
+ opts.recurseWith = rename
- Object.defineProperty(XMLNode.prototype, 'firstChild', {
- get: function() {
- return this.children[0] || null;
- }
- });
+ queue.add(0, rename, [from, to, opts])
- Object.defineProperty(XMLNode.prototype, 'lastChild', {
- get: function() {
- return this.children[this.children.length - 1] || null;
- }
- });
+ return queue.run().then(function () {
+ return remove(from)
+ }, function (err) {
+ // if the target already exists don't clobber it
+ if (err.code === 'EEXIST' || err.code === 'EPERM') {
+ return passThroughError()
+ } else {
+ return remove(to).then(passThroughError, passThroughError)
+ }
+ function passThroughError () {
+ return Promise.reject(err)
+ }
+ })
- Object.defineProperty(XMLNode.prototype, 'previousSibling', {
- get: function() {
- var i;
- i = this.parent.children.indexOf(this);
- return this.parent.children[i - 1] || null;
- }
- });
+ function remove (target) {
+ var opts = {
+ unlink: fs.unlink,
+ chmod: fs.chmod,
+ stat: fs.stat,
+ lstat: fs.lstat,
+ rmdir: fs.rmdir,
+ readdir: fs.readdir,
+ glob: false
+ }
+ return rimrafAsync(target, opts)
+ }
- Object.defineProperty(XMLNode.prototype, 'nextSibling', {
- get: function() {
- var i;
- i = this.parent.children.indexOf(this);
- return this.parent.children[i + 1] || null;
+ function rename (from, to, opts, done) {
+ return renameAsync(from, to).catch(function (err) {
+ if (err.code !== 'EXDEV') {
+ return Promise.reject(err)
+ } else {
+ return remove(to).then(function () {
+ return copy.item(from, to, opts)
+ })
}
- });
+ })
+ }
+}
- Object.defineProperty(XMLNode.prototype, 'ownerDocument', {
- get: function() {
- return this.document() || null;
- }
- });
- Object.defineProperty(XMLNode.prototype, 'textContent', {
- get: function() {
- var child, j, len, ref2, str;
- if (this.nodeType === NodeType.Element || this.nodeType === NodeType.DocumentFragment) {
- str = '';
- ref2 = this.children;
- for (j = 0, len = ref2.length; j < len; j++) {
- child = ref2[j];
- if (child.textContent) {
- str += child.textContent;
- }
- }
- return str;
- } else {
- return null;
- }
- },
- set: function(value) {
- throw new Error("This DOM method is not implemented." + this.debugInfo());
- }
- });
+/***/ }),
+/* 185 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- XMLNode.prototype.setParent = function(parent) {
- var child, j, len, ref2, results;
- this.parent = parent;
- if (parent) {
- this.options = parent.options;
- this.stringify = parent.stringify;
- }
- ref2 = this.children;
- results = [];
- for (j = 0, len = ref2.length; j < len; j++) {
- child = ref2[j];
- results.push(child.setParent(this));
- }
- return results;
- };
+"use strict";
- XMLNode.prototype.element = function(name, attributes, text) {
- var childNode, item, j, k, key, lastChild, len, len1, ref2, ref3, val;
- lastChild = null;
- if (attributes === null && (text == null)) {
- ref2 = [{}, null], attributes = ref2[0], text = ref2[1];
- }
- if (attributes == null) {
- attributes = {};
- }
- attributes = getValue(attributes);
- if (!isObject(attributes)) {
- ref3 = [attributes, text], text = ref3[0], attributes = ref3[1];
- }
- if (name != null) {
- name = getValue(name);
- }
- if (Array.isArray(name)) {
- for (j = 0, len = name.length; j < len; j++) {
- item = name[j];
- lastChild = this.element(item);
- }
- } else if (isFunction(name)) {
- lastChild = this.element(name.apply());
- } else if (isObject(name)) {
- for (key in name) {
- if (!hasProp.call(name, key)) continue;
- val = name[key];
- if (isFunction(val)) {
- val = val.apply();
- }
- if (!this.options.ignoreDecorators && this.stringify.convertAttKey && key.indexOf(this.stringify.convertAttKey) === 0) {
- lastChild = this.attribute(key.substr(this.stringify.convertAttKey.length), val);
- } else if (!this.options.separateArrayItems && Array.isArray(val) && isEmpty(val)) {
- lastChild = this.dummy();
- } else if (isObject(val) && isEmpty(val)) {
- lastChild = this.element(key);
- } else if (!this.options.keepNullNodes && (val == null)) {
- lastChild = this.dummy();
- } else if (!this.options.separateArrayItems && Array.isArray(val)) {
- for (k = 0, len1 = val.length; k < len1; k++) {
- item = val[k];
- childNode = {};
- childNode[key] = item;
- lastChild = this.element(childNode);
- }
- } else if (isObject(val)) {
- if (!this.options.ignoreDecorators && this.stringify.convertTextKey && key.indexOf(this.stringify.convertTextKey) === 0) {
- lastChild = this.element(val);
- } else {
- lastChild = this.element(key);
- lastChild.element(val);
- }
- } else {
- lastChild = this.element(key, val);
- }
- }
- } else if (!this.options.keepNullNodes && text === null) {
- lastChild = this.dummy();
- } else {
- if (!this.options.ignoreDecorators && this.stringify.convertTextKey && name.indexOf(this.stringify.convertTextKey) === 0) {
- lastChild = this.text(text);
- } else if (!this.options.ignoreDecorators && this.stringify.convertCDataKey && name.indexOf(this.stringify.convertCDataKey) === 0) {
- lastChild = this.cdata(text);
- } else if (!this.options.ignoreDecorators && this.stringify.convertCommentKey && name.indexOf(this.stringify.convertCommentKey) === 0) {
- lastChild = this.comment(text);
- } else if (!this.options.ignoreDecorators && this.stringify.convertRawKey && name.indexOf(this.stringify.convertRawKey) === 0) {
- lastChild = this.raw(text);
- } else if (!this.options.ignoreDecorators && this.stringify.convertPIKey && name.indexOf(this.stringify.convertPIKey) === 0) {
- lastChild = this.instruction(name.substr(this.stringify.convertPIKey.length), text);
- } else {
- lastChild = this.node(name, attributes, text);
- }
- }
- if (lastChild == null) {
- throw new Error("Could not create any elements with: " + name + ". " + this.debugInfo());
- }
- return lastChild;
- };
- XMLNode.prototype.insertBefore = function(name, attributes, text) {
- var child, i, newChild, refChild, removed;
- if (name != null ? name.type : void 0) {
- newChild = name;
- refChild = attributes;
- newChild.setParent(this);
- if (refChild) {
- i = children.indexOf(refChild);
- removed = children.splice(i);
- children.push(newChild);
- Array.prototype.push.apply(children, removed);
- } else {
- children.push(newChild);
- }
- return newChild;
- } else {
- if (this.isRoot) {
- throw new Error("Cannot insert elements at root level. " + this.debugInfo(name));
- }
- i = this.parent.children.indexOf(this);
- removed = this.parent.children.splice(i);
- child = this.parent.element(name, attributes, text);
- Array.prototype.push.apply(this.parent.children, removed);
- return child;
- }
- };
+const BB = __webpack_require__(900)
- XMLNode.prototype.insertAfter = function(name, attributes, text) {
- var child, i, removed;
- if (this.isRoot) {
- throw new Error("Cannot insert elements at root level. " + this.debugInfo(name));
- }
- i = this.parent.children.indexOf(this);
- removed = this.parent.children.splice(i + 1);
- child = this.parent.element(name, attributes, text);
- Array.prototype.push.apply(this.parent.children, removed);
- return child;
- };
+const contentPath = __webpack_require__(969)
+const figgyPudding = __webpack_require__(965)
+const fs = __webpack_require__(598)
+const PassThrough = __webpack_require__(794).PassThrough
+const pipe = BB.promisify(__webpack_require__(371).pipe)
+const ssri = __webpack_require__(951)
+const Y = __webpack_require__(945)
- XMLNode.prototype.remove = function() {
- var i, ref2;
- if (this.isRoot) {
- throw new Error("Cannot remove the root element. " + this.debugInfo());
- }
- i = this.parent.children.indexOf(this);
- [].splice.apply(this.parent.children, [i, i - i + 1].concat(ref2 = [])), ref2;
- return this.parent;
- };
+const lstatAsync = BB.promisify(fs.lstat)
+const readFileAsync = BB.promisify(fs.readFile)
- XMLNode.prototype.node = function(name, attributes, text) {
- var child, ref2;
- if (name != null) {
- name = getValue(name);
- }
- attributes || (attributes = {});
- attributes = getValue(attributes);
- if (!isObject(attributes)) {
- ref2 = [attributes, text], text = ref2[0], attributes = ref2[1];
- }
- child = new XMLElement(this, name, attributes);
- if (text != null) {
- child.text(text);
- }
- this.children.push(child);
- return child;
- };
+const ReadOpts = figgyPudding({
+ size: {}
+})
- XMLNode.prototype.text = function(value) {
- var child;
- if (isObject(value)) {
- this.element(value);
+module.exports = read
+function read (cache, integrity, opts) {
+ opts = ReadOpts(opts)
+ return withContentSri(cache, integrity, (cpath, sri) => {
+ return readFileAsync(cpath, null).then(data => {
+ if (typeof opts.size === 'number' && opts.size !== data.length) {
+ throw sizeError(opts.size, data.length)
+ } else if (ssri.checkData(data, sri)) {
+ return data
+ } else {
+ throw integrityError(sri, cpath)
}
- child = new XMLText(this, value);
- this.children.push(child);
- return this;
- };
-
- XMLNode.prototype.cdata = function(value) {
- var child;
- child = new XMLCData(this, value);
- this.children.push(child);
- return this;
- };
+ })
+ })
+}
- XMLNode.prototype.comment = function(value) {
- var child;
- child = new XMLComment(this, value);
- this.children.push(child);
- return this;
- };
+module.exports.sync = readSync
+function readSync (cache, integrity, opts) {
+ opts = ReadOpts(opts)
+ return withContentSriSync(cache, integrity, (cpath, sri) => {
+ const data = fs.readFileSync(cpath)
+ if (typeof opts.size === 'number' && opts.size !== data.length) {
+ throw sizeError(opts.size, data.length)
+ } else if (ssri.checkData(data, sri)) {
+ return data
+ } else {
+ throw integrityError(sri, cpath)
+ }
+ })
+}
- XMLNode.prototype.commentBefore = function(value) {
- var child, i, removed;
- i = this.parent.children.indexOf(this);
- removed = this.parent.children.splice(i);
- child = this.parent.comment(value);
- Array.prototype.push.apply(this.parent.children, removed);
- return this;
- };
+module.exports.stream = readStream
+module.exports.readStream = readStream
+function readStream (cache, integrity, opts) {
+ opts = ReadOpts(opts)
+ const stream = new PassThrough()
+ withContentSri(cache, integrity, (cpath, sri) => {
+ return lstatAsync(cpath).then(stat => ({ cpath, sri, stat }))
+ }).then(({ cpath, sri, stat }) => {
+ return pipe(
+ fs.createReadStream(cpath),
+ ssri.integrityStream({
+ integrity: sri,
+ size: opts.size
+ }),
+ stream
+ )
+ }).catch(err => {
+ stream.emit('error', err)
+ })
+ return stream
+}
- XMLNode.prototype.commentAfter = function(value) {
- var child, i, removed;
- i = this.parent.children.indexOf(this);
- removed = this.parent.children.splice(i + 1);
- child = this.parent.comment(value);
- Array.prototype.push.apply(this.parent.children, removed);
- return this;
- };
+let copyFileAsync
+if (fs.copyFile) {
+ module.exports.copy = copy
+ module.exports.copy.sync = copySync
+ copyFileAsync = BB.promisify(fs.copyFile)
+}
- XMLNode.prototype.raw = function(value) {
- var child;
- child = new XMLRaw(this, value);
- this.children.push(child);
- return this;
- };
+function copy (cache, integrity, dest, opts) {
+ opts = ReadOpts(opts)
+ return withContentSri(cache, integrity, (cpath, sri) => {
+ return copyFileAsync(cpath, dest)
+ })
+}
- XMLNode.prototype.dummy = function() {
- var child;
- child = new XMLDummy(this);
- return child;
- };
+function copySync (cache, integrity, dest, opts) {
+ opts = ReadOpts(opts)
+ return withContentSriSync(cache, integrity, (cpath, sri) => {
+ return fs.copyFileSync(cpath, dest)
+ })
+}
- XMLNode.prototype.instruction = function(target, value) {
- var insTarget, insValue, instruction, j, len;
- if (target != null) {
- target = getValue(target);
- }
- if (value != null) {
- value = getValue(value);
+module.exports.hasContent = hasContent
+function hasContent (cache, integrity) {
+ if (!integrity) { return BB.resolve(false) }
+ return withContentSri(cache, integrity, (cpath, sri) => {
+ return lstatAsync(cpath).then(stat => ({ size: stat.size, sri, stat }))
+ }).catch(err => {
+ if (err.code === 'ENOENT') { return false }
+ if (err.code === 'EPERM') {
+ if (process.platform !== 'win32') {
+ throw err
+ } else {
+ return false
}
- if (Array.isArray(target)) {
- for (j = 0, len = target.length; j < len; j++) {
- insTarget = target[j];
- this.instruction(insTarget);
- }
- } else if (isObject(target)) {
- for (insTarget in target) {
- if (!hasProp.call(target, insTarget)) continue;
- insValue = target[insTarget];
- this.instruction(insTarget, insValue);
- }
- } else {
- if (isFunction(value)) {
- value = value.apply();
- }
- instruction = new XMLProcessingInstruction(this, target, value);
- this.children.push(instruction);
- }
- return this;
- };
-
- XMLNode.prototype.instructionBefore = function(target, value) {
- var child, i, removed;
- i = this.parent.children.indexOf(this);
- removed = this.parent.children.splice(i);
- child = this.parent.instruction(target, value);
- Array.prototype.push.apply(this.parent.children, removed);
- return this;
- };
-
- XMLNode.prototype.instructionAfter = function(target, value) {
- var child, i, removed;
- i = this.parent.children.indexOf(this);
- removed = this.parent.children.splice(i + 1);
- child = this.parent.instruction(target, value);
- Array.prototype.push.apply(this.parent.children, removed);
- return this;
- };
-
- XMLNode.prototype.declaration = function(version, encoding, standalone) {
- var doc, xmldec;
- doc = this.document();
- xmldec = new XMLDeclaration(doc, version, encoding, standalone);
- if (doc.children.length === 0) {
- doc.children.unshift(xmldec);
- } else if (doc.children[0].type === NodeType.Declaration) {
- doc.children[0] = xmldec;
- } else {
- doc.children.unshift(xmldec);
- }
- return doc.root() || doc;
- };
+ }
+ })
+}
- XMLNode.prototype.dtd = function(pubID, sysID) {
- var child, doc, doctype, i, j, k, len, len1, ref2, ref3;
- doc = this.document();
- doctype = new XMLDocType(doc, pubID, sysID);
- ref2 = doc.children;
- for (i = j = 0, len = ref2.length; j < len; i = ++j) {
- child = ref2[i];
- if (child.type === NodeType.DocType) {
- doc.children[i] = doctype;
- return doctype;
- }
- }
- ref3 = doc.children;
- for (i = k = 0, len1 = ref3.length; k < len1; i = ++k) {
- child = ref3[i];
- if (child.isRoot) {
- doc.children.splice(i, 0, doctype);
- return doctype;
+module.exports.hasContent.sync = hasContentSync
+function hasContentSync (cache, integrity) {
+ if (!integrity) { return false }
+ return withContentSriSync(cache, integrity, (cpath, sri) => {
+ try {
+ const stat = fs.lstatSync(cpath)
+ return { size: stat.size, sri, stat }
+ } catch (err) {
+ if (err.code === 'ENOENT') { return false }
+ if (err.code === 'EPERM') {
+ if (process.platform !== 'win32') {
+ throw err
+ } else {
+ return false
}
}
- doc.children.push(doctype);
- return doctype;
- };
+ }
+ })
+}
- XMLNode.prototype.up = function() {
- if (this.isRoot) {
- throw new Error("The root node has no parent. Use doc() if you need to get the document object.");
- }
- return this.parent;
- };
+function withContentSri (cache, integrity, fn) {
+ return BB.try(() => {
+ const sri = ssri.parse(integrity)
+ // If `integrity` has multiple entries, pick the first digest
+ // with available local data.
+ const algo = sri.pickAlgorithm()
+ const digests = sri[algo]
+ if (digests.length <= 1) {
+ const cpath = contentPath(cache, digests[0])
+ return fn(cpath, digests[0])
+ } else {
+ return BB.any(sri[sri.pickAlgorithm()].map(meta => {
+ return withContentSri(cache, meta, fn)
+ }, { concurrency: 1 }))
+ .catch(err => {
+ if ([].some.call(err, e => e.code === 'ENOENT')) {
+ throw Object.assign(
+ new Error('No matching content found for ' + sri.toString()),
+ { code: 'ENOENT' }
+ )
+ } else {
+ throw err[0]
+ }
+ })
+ }
+ })
+}
- XMLNode.prototype.root = function() {
- var node;
- node = this;
- while (node) {
- if (node.type === NodeType.Document) {
- return node.rootObject;
- } else if (node.isRoot) {
- return node;
- } else {
- node = node.parent;
- }
+function withContentSriSync (cache, integrity, fn) {
+ const sri = ssri.parse(integrity)
+ // If `integrity` has multiple entries, pick the first digest
+ // with available local data.
+ const algo = sri.pickAlgorithm()
+ const digests = sri[algo]
+ if (digests.length <= 1) {
+ const cpath = contentPath(cache, digests[0])
+ return fn(cpath, digests[0])
+ } else {
+ let lastErr = null
+ for (const meta of sri[sri.pickAlgorithm()]) {
+ try {
+ return withContentSriSync(cache, meta, fn)
+ } catch (err) {
+ lastErr = err
}
- };
+ }
+ if (lastErr) { throw lastErr }
+ }
+}
- XMLNode.prototype.document = function() {
- var node;
- node = this;
- while (node) {
- if (node.type === NodeType.Document) {
- return node;
- } else {
- node = node.parent;
- }
- }
- };
+function sizeError (expected, found) {
+ var err = new Error(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+ err.expected = expected
+ err.found = found
+ err.code = 'EBADSIZE'
+ return err
+}
- XMLNode.prototype.end = function(options) {
- return this.document().end(options);
- };
+function integrityError (sri, path) {
+ var err = new Error(Y`Integrity verification failed for ${sri} (${path})`)
+ err.code = 'EINTEGRITY'
+ err.sri = sri
+ err.path = path
+ return err
+}
- XMLNode.prototype.prev = function() {
- var i;
- i = this.parent.children.indexOf(this);
- if (i < 1) {
- throw new Error("Already at the first node. " + this.debugInfo());
- }
- return this.parent.children[i - 1];
- };
- XMLNode.prototype.next = function() {
- var i;
- i = this.parent.children.indexOf(this);
- if (i === -1 || i === this.parent.children.length - 1) {
- throw new Error("Already at the last node. " + this.debugInfo());
- }
- return this.parent.children[i + 1];
- };
+/***/ }),
+/* 186 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- XMLNode.prototype.importDocument = function(doc) {
- var clonedRoot;
- clonedRoot = doc.root().clone();
- clonedRoot.parent = this;
- clonedRoot.isRoot = false;
- this.children.push(clonedRoot);
- return this;
- };
+"use strict";
- XMLNode.prototype.debugInfo = function(name) {
- var ref2, ref3;
- name = name || this.name;
- if ((name == null) && !((ref2 = this.parent) != null ? ref2.name : void 0)) {
- return "";
- } else if (name == null) {
- return "parent: <" + this.parent.name + ">";
- } else if (!((ref3 = this.parent) != null ? ref3.name : void 0)) {
- return "node: <" + name + ">";
- } else {
- return "node: <" + name + ">, parent: <" + this.parent.name + ">";
- }
- };
- XMLNode.prototype.ele = function(name, attributes, text) {
- return this.element(name, attributes, text);
- };
+const BB = __webpack_require__(900)
- XMLNode.prototype.nod = function(name, attributes, text) {
- return this.node(name, attributes, text);
- };
+const contentPath = __webpack_require__(969)
+const fixOwner = __webpack_require__(133)
+const fs = __webpack_require__(598)
+const moveFile = __webpack_require__(201)
+const PassThrough = __webpack_require__(794).PassThrough
+const path = __webpack_require__(622)
+const pipe = BB.promisify(__webpack_require__(371).pipe)
+const rimraf = BB.promisify(__webpack_require__(342))
+const ssri = __webpack_require__(951)
+const to = __webpack_require__(371).to
+const uniqueFilename = __webpack_require__(94)
+const Y = __webpack_require__(945)
- XMLNode.prototype.txt = function(value) {
- return this.text(value);
- };
+const writeFileAsync = BB.promisify(fs.writeFile)
- XMLNode.prototype.dat = function(value) {
- return this.cdata(value);
- };
+module.exports = write
+function write (cache, data, opts) {
+ opts = opts || {}
+ if (opts.algorithms && opts.algorithms.length > 1) {
+ throw new Error(
+ Y`opts.algorithms only supports a single algorithm for now`
+ )
+ }
+ if (typeof opts.size === 'number' && data.length !== opts.size) {
+ return BB.reject(sizeError(opts.size, data.length))
+ }
+ const sri = ssri.fromData(data, {
+ algorithms: opts.algorithms
+ })
+ if (opts.integrity && !ssri.checkData(data, opts.integrity, opts)) {
+ return BB.reject(checksumError(opts.integrity, sri))
+ }
+ return BB.using(makeTmp(cache, opts), tmp => (
+ writeFileAsync(
+ tmp.target, data, { flag: 'wx' }
+ ).then(() => (
+ moveToDestination(tmp, cache, sri, opts)
+ ))
+ )).then(() => ({ integrity: sri, size: data.length }))
+}
- XMLNode.prototype.com = function(value) {
- return this.comment(value);
- };
+module.exports.stream = writeStream
+function writeStream (cache, opts) {
+ opts = opts || {}
+ const inputStream = new PassThrough()
+ let inputErr = false
+ function errCheck () {
+ if (inputErr) { throw inputErr }
+ }
- XMLNode.prototype.ins = function(target, value) {
- return this.instruction(target, value);
- };
+ let allDone
+ const ret = to((c, n, cb) => {
+ if (!allDone) {
+ allDone = handleContent(inputStream, cache, opts, errCheck)
+ }
+ inputStream.write(c, n, cb)
+ }, cb => {
+ inputStream.end(() => {
+ if (!allDone) {
+ const e = new Error(Y`Cache input stream was empty`)
+ e.code = 'ENODATA'
+ return ret.emit('error', e)
+ }
+ allDone.then(res => {
+ res.integrity && ret.emit('integrity', res.integrity)
+ res.size !== null && ret.emit('size', res.size)
+ cb()
+ }, e => {
+ ret.emit('error', e)
+ })
+ })
+ })
+ ret.once('error', e => {
+ inputErr = e
+ })
+ return ret
+}
- XMLNode.prototype.doc = function() {
- return this.document();
- };
+function handleContent (inputStream, cache, opts, errCheck) {
+ return BB.using(makeTmp(cache, opts), tmp => {
+ errCheck()
+ return pipeToTmp(
+ inputStream, cache, tmp.target, opts, errCheck
+ ).then(res => {
+ return moveToDestination(
+ tmp, cache, res.integrity, opts, errCheck
+ ).then(() => res)
+ })
+ })
+}
- XMLNode.prototype.dec = function(version, encoding, standalone) {
- return this.declaration(version, encoding, standalone);
- };
+function pipeToTmp (inputStream, cache, tmpTarget, opts, errCheck) {
+ return BB.resolve().then(() => {
+ let integrity
+ let size
+ const hashStream = ssri.integrityStream({
+ integrity: opts.integrity,
+ algorithms: opts.algorithms,
+ size: opts.size
+ }).on('integrity', s => {
+ integrity = s
+ }).on('size', s => {
+ size = s
+ })
+ const outStream = fs.createWriteStream(tmpTarget, {
+ flags: 'wx'
+ })
+ errCheck()
+ return pipe(inputStream, hashStream, outStream).then(() => {
+ return { integrity, size }
+ }).catch(err => {
+ return rimraf(tmpTarget).then(() => { throw err })
+ })
+ })
+}
- XMLNode.prototype.e = function(name, attributes, text) {
- return this.element(name, attributes, text);
- };
+function makeTmp (cache, opts) {
+ const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+ return fixOwner.mkdirfix(
+ cache, path.dirname(tmpTarget)
+ ).then(() => ({
+ target: tmpTarget,
+ moved: false
+ })).disposer(tmp => (!tmp.moved && rimraf(tmp.target)))
+}
- XMLNode.prototype.n = function(name, attributes, text) {
- return this.node(name, attributes, text);
- };
+function moveToDestination (tmp, cache, sri, opts, errCheck) {
+ errCheck && errCheck()
+ const destination = contentPath(cache, sri)
+ const destDir = path.dirname(destination)
- XMLNode.prototype.t = function(value) {
- return this.text(value);
- };
+ return fixOwner.mkdirfix(
+ cache, destDir
+ ).then(() => {
+ errCheck && errCheck()
+ return moveFile(tmp.target, destination)
+ }).then(() => {
+ errCheck && errCheck()
+ tmp.moved = true
+ return fixOwner.chownr(cache, destination)
+ })
+}
- XMLNode.prototype.d = function(value) {
- return this.cdata(value);
- };
+function sizeError (expected, found) {
+ var err = new Error(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+ err.expected = expected
+ err.found = found
+ err.code = 'EBADSIZE'
+ return err
+}
- XMLNode.prototype.c = function(value) {
- return this.comment(value);
- };
+function checksumError (expected, found) {
+ var err = new Error(Y`Integrity check failed:
+ Wanted: ${expected}
+ Found: ${found}`)
+ err.code = 'EINTEGRITY'
+ err.expected = expected
+ err.found = found
+ return err
+}
- XMLNode.prototype.r = function(value) {
- return this.raw(value);
- };
- XMLNode.prototype.i = function(target, value) {
- return this.instruction(target, value);
- };
+/***/ }),
+/* 187 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- XMLNode.prototype.u = function() {
- return this.up();
- };
+"use strict";
- XMLNode.prototype.importXMLBuilder = function(doc) {
- return this.importDocument(doc);
- };
+var EventEmitter = __webpack_require__(614).EventEmitter
+var util = __webpack_require__(669)
- XMLNode.prototype.replaceChild = function(newChild, oldChild) {
- throw new Error("This DOM method is not implemented." + this.debugInfo());
- };
+var trackerId = 0
+var TrackerBase = module.exports = function (name) {
+ EventEmitter.call(this)
+ this.id = ++trackerId
+ this.name = name
+}
+util.inherits(TrackerBase, EventEmitter)
- XMLNode.prototype.removeChild = function(oldChild) {
- throw new Error("This DOM method is not implemented." + this.debugInfo());
- };
- XMLNode.prototype.appendChild = function(newChild) {
- throw new Error("This DOM method is not implemented." + this.debugInfo());
- };
+/***/ }),
+/* 188 */
+/***/ (function(module) {
- XMLNode.prototype.hasChildNodes = function() {
- return this.children.length !== 0;
- };
+/**
+ * @preserve
+ * JS Implementation of incremental MurmurHash3 (r150) (as of May 10, 2013)
+ *
+ * @author Jens Taylor
+ * @see http://github.com/homebrewing/brauhaus-diff
+ * @author Gary Court
+ * @see http://github.com/garycourt/murmurhash-js
+ * @author Austin Appleby
+ * @see http://sites.google.com/site/murmurhash/
+ */
+(function(){
+ var cache;
- XMLNode.prototype.cloneNode = function(deep) {
- throw new Error("This DOM method is not implemented." + this.debugInfo());
- };
+ // Call this function without `new` to use the cached object (good for
+ // single-threaded environments), or with `new` to create a new object.
+ //
+ // @param {string} key A UTF-16 or ASCII string
+ // @param {number} seed An optional positive integer
+ // @return {object} A MurmurHash3 object for incremental hashing
+ function MurmurHash3(key, seed) {
+ var m = this instanceof MurmurHash3 ? this : cache;
+ m.reset(seed)
+ if (typeof key === 'string' && key.length > 0) {
+ m.hash(key);
+ }
- XMLNode.prototype.normalize = function() {
- throw new Error("This DOM method is not implemented." + this.debugInfo());
+ if (m !== this) {
+ return m;
+ }
};
- XMLNode.prototype.isSupported = function(feature, version) {
- return true;
- };
+ // Incrementally add a string to this hash
+ //
+ // @param {string} key A UTF-16 or ASCII string
+ // @return {object} this
+ MurmurHash3.prototype.hash = function(key) {
+ var h1, k1, i, top, len;
- XMLNode.prototype.hasAttributes = function() {
- return this.attribs.length !== 0;
- };
+ len = key.length;
+ this.len += len;
- XMLNode.prototype.compareDocumentPosition = function(other) {
- var ref, res;
- ref = this;
- if (ref === other) {
- return 0;
- } else if (this.document() !== other.document()) {
- res = DocumentPosition.Disconnected | DocumentPosition.ImplementationSpecific;
- if (Math.random() < 0.5) {
- res |= DocumentPosition.Preceding;
- } else {
- res |= DocumentPosition.Following;
- }
- return res;
- } else if (ref.isAncestor(other)) {
- return DocumentPosition.Contains | DocumentPosition.Preceding;
- } else if (ref.isDescendant(other)) {
- return DocumentPosition.Contains | DocumentPosition.Following;
- } else if (ref.isPreceding(other)) {
- return DocumentPosition.Preceding;
- } else {
- return DocumentPosition.Following;
- }
- };
+ k1 = this.k1;
+ i = 0;
+ switch (this.rem) {
+ case 0: k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) : 0;
+ case 1: k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) << 8 : 0;
+ case 2: k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) << 16 : 0;
+ case 3:
+ k1 ^= len > i ? (key.charCodeAt(i) & 0xff) << 24 : 0;
+ k1 ^= len > i ? (key.charCodeAt(i++) & 0xff00) >> 8 : 0;
+ }
- XMLNode.prototype.isSameNode = function(other) {
- throw new Error("This DOM method is not implemented." + this.debugInfo());
- };
+ this.rem = (len + this.rem) & 3; // & 3 is same as % 4
+ len -= this.rem;
+ if (len > 0) {
+ h1 = this.h1;
+ while (1) {
+ k1 = (k1 * 0x2d51 + (k1 & 0xffff) * 0xcc9e0000) & 0xffffffff;
+ k1 = (k1 << 15) | (k1 >>> 17);
+ k1 = (k1 * 0x3593 + (k1 & 0xffff) * 0x1b870000) & 0xffffffff;
- XMLNode.prototype.lookupPrefix = function(namespaceURI) {
- throw new Error("This DOM method is not implemented." + this.debugInfo());
- };
+ h1 ^= k1;
+ h1 = (h1 << 13) | (h1 >>> 19);
+ h1 = (h1 * 5 + 0xe6546b64) & 0xffffffff;
- XMLNode.prototype.isDefaultNamespace = function(namespaceURI) {
- throw new Error("This DOM method is not implemented." + this.debugInfo());
- };
+ if (i >= len) {
+ break;
+ }
- XMLNode.prototype.lookupNamespaceURI = function(prefix) {
- throw new Error("This DOM method is not implemented." + this.debugInfo());
- };
+ k1 = ((key.charCodeAt(i++) & 0xffff)) ^
+ ((key.charCodeAt(i++) & 0xffff) << 8) ^
+ ((key.charCodeAt(i++) & 0xffff) << 16);
+ top = key.charCodeAt(i++);
+ k1 ^= ((top & 0xff) << 24) ^
+ ((top & 0xff00) >> 8);
+ }
- XMLNode.prototype.isEqualNode = function(node) {
- var i, j, ref2;
- if (node.nodeType !== this.nodeType) {
- return false;
- }
- if (node.children.length !== this.children.length) {
- return false;
- }
- for (i = j = 0, ref2 = this.children.length - 1; 0 <= ref2 ? j <= ref2 : j >= ref2; i = 0 <= ref2 ? ++j : --j) {
- if (!this.children[i].isEqualNode(node.children[i])) {
- return false;
+ k1 = 0;
+ switch (this.rem) {
+ case 3: k1 ^= (key.charCodeAt(i + 2) & 0xffff) << 16;
+ case 2: k1 ^= (key.charCodeAt(i + 1) & 0xffff) << 8;
+ case 1: k1 ^= (key.charCodeAt(i) & 0xffff);
+ }
+
+ this.h1 = h1;
}
- }
- return true;
- };
- XMLNode.prototype.getFeature = function(feature, version) {
- throw new Error("This DOM method is not implemented." + this.debugInfo());
+ this.k1 = k1;
+ return this;
};
- XMLNode.prototype.setUserData = function(key, data, handler) {
- throw new Error("This DOM method is not implemented." + this.debugInfo());
- };
+ // Get the result of this hash
+ //
+ // @return {number} The 32-bit hash
+ MurmurHash3.prototype.result = function() {
+ var k1, h1;
+
+ k1 = this.k1;
+ h1 = this.h1;
- XMLNode.prototype.getUserData = function(key) {
- throw new Error("This DOM method is not implemented." + this.debugInfo());
- };
+ if (k1 > 0) {
+ k1 = (k1 * 0x2d51 + (k1 & 0xffff) * 0xcc9e0000) & 0xffffffff;
+ k1 = (k1 << 15) | (k1 >>> 17);
+ k1 = (k1 * 0x3593 + (k1 & 0xffff) * 0x1b870000) & 0xffffffff;
+ h1 ^= k1;
+ }
- XMLNode.prototype.contains = function(other) {
- if (!other) {
- return false;
- }
- return other === this || this.isDescendant(other);
- };
+ h1 ^= this.len;
- XMLNode.prototype.isDescendant = function(node) {
- var child, isDescendantChild, j, len, ref2;
- ref2 = this.children;
- for (j = 0, len = ref2.length; j < len; j++) {
- child = ref2[j];
- if (node === child) {
- return true;
- }
- isDescendantChild = child.isDescendant(node);
- if (isDescendantChild) {
- return true;
- }
- }
- return false;
- };
+ h1 ^= h1 >>> 16;
+ h1 = (h1 * 0xca6b + (h1 & 0xffff) * 0x85eb0000) & 0xffffffff;
+ h1 ^= h1 >>> 13;
+ h1 = (h1 * 0xae35 + (h1 & 0xffff) * 0xc2b20000) & 0xffffffff;
+ h1 ^= h1 >>> 16;
- XMLNode.prototype.isAncestor = function(node) {
- return node.isDescendant(this);
+ return h1 >>> 0;
};
- XMLNode.prototype.isPreceding = function(node) {
- var nodePos, thisPos;
- nodePos = this.treePosition(node);
- thisPos = this.treePosition(this);
- if (nodePos === -1 || thisPos === -1) {
- return false;
- } else {
- return nodePos < thisPos;
- }
+ // Reset the hash object for reuse
+ //
+ // @param {number} seed An optional positive integer
+ MurmurHash3.prototype.reset = function(seed) {
+ this.h1 = typeof seed === 'number' ? seed : 0;
+ this.rem = this.k1 = this.len = 0;
+ return this;
};
- XMLNode.prototype.isFollowing = function(node) {
- var nodePos, thisPos;
- nodePos = this.treePosition(node);
- thisPos = this.treePosition(this);
- if (nodePos === -1 || thisPos === -1) {
- return false;
- } else {
- return nodePos > thisPos;
- }
- };
+ // A cached object to use. This can be safely used if you're in a single-
+ // threaded environment, otherwise you need to create new hashes to use.
+ cache = new MurmurHash3();
- XMLNode.prototype.treePosition = function(node) {
- var found, pos;
- pos = 0;
- found = false;
- this.foreachTreeNode(this.document(), function(childNode) {
- pos++;
- if (!found && childNode === node) {
- return found = true;
- }
- });
- if (found) {
- return pos;
- } else {
- return -1;
- }
- };
+ if (true) {
+ module.exports = MurmurHash3;
+ } else {}
+}());
- XMLNode.prototype.foreachTreeNode = function(node, func) {
- var child, j, len, ref2, res;
- node || (node = this.document());
- ref2 = node.children;
- for (j = 0, len = ref2.length; j < len; j++) {
- child = ref2[j];
- if (res = func(child)) {
- return res;
- } else {
- res = this.foreachTreeNode(child, func);
- if (res) {
- return res;
- }
- }
- }
- };
- return XMLNode;
+/***/ }),
+/* 189 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
- })();
+"use strict";
-}).call(this);
+var Buffer = __webpack_require__(215).Buffer;
+// Multibyte codec. In this scheme, a character is represented by 1 or more bytes.
+// Our codec supports UTF-16 surrogates, extensions for GB18030 and unicode sequences.
+// To save memory and loading time, we read table files only when requested.
-/***/ }),
-/* 258 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+exports._dbcs = DBCSCodec;
-"use strict";
+var UNASSIGNED = -1,
+ GB18030_CODE = -2,
+ SEQ_START = -10,
+ NODE_START = -1000,
+ UNASSIGNED_NODE = new Array(0x100),
+ DEF_CHAR = -1;
+for (var i = 0; i < 0x100; i++)
+ UNASSIGNED_NODE[i] = UNASSIGNED;
-const ls = __webpack_require__(14)
-const get = __webpack_require__(425)
-const put = __webpack_require__(154)
-const rm = __webpack_require__(435)
-const verify = __webpack_require__(290)
-const setLocale = __webpack_require__(945).setLocale
-const clearMemoized = __webpack_require__(521).clearMemoized
-const tmp = __webpack_require__(862)
-setLocale('en')
+// Class DBCSCodec reads and initializes mapping tables.
+function DBCSCodec(codecOptions, iconv) {
+ this.encodingName = codecOptions.encodingName;
+ if (!codecOptions)
+ throw new Error("DBCS codec is called without the data.")
+ if (!codecOptions.table)
+ throw new Error("Encoding '" + this.encodingName + "' has no data.");
-const x = module.exports
+ // Load tables.
+ var mappingTable = codecOptions.table();
-x.ls = cache => ls(cache)
-x.ls.stream = cache => ls.stream(cache)
-x.get = (cache, key, opts) => get(cache, key, opts)
-x.get.byDigest = (cache, hash, opts) => get.byDigest(cache, hash, opts)
-x.get.sync = (cache, key, opts) => get.sync(cache, key, opts)
-x.get.sync.byDigest = (cache, key, opts) => get.sync.byDigest(cache, key, opts)
-x.get.stream = (cache, key, opts) => get.stream(cache, key, opts)
-x.get.stream.byDigest = (cache, hash, opts) => get.stream.byDigest(cache, hash, opts)
-x.get.copy = (cache, key, dest, opts) => get.copy(cache, key, dest, opts)
-x.get.copy.byDigest = (cache, hash, dest, opts) => get.copy.byDigest(cache, hash, dest, opts)
-x.get.info = (cache, key) => get.info(cache, key)
-x.get.hasContent = (cache, hash) => get.hasContent(cache, hash)
-x.get.hasContent.sync = (cache, hash) => get.hasContent.sync(cache, hash)
+ // Decode tables: MBCS -> Unicode.
-x.put = (cache, key, data, opts) => put(cache, key, data, opts)
-x.put.stream = (cache, key, opts) => put.stream(cache, key, opts)
+ // decodeTables is a trie, encoded as an array of arrays of integers. Internal arrays are trie nodes and all have len = 256.
+ // Trie root is decodeTables[0].
+ // Values: >= 0 -> unicode character code. can be > 0xFFFF
+ // == UNASSIGNED -> unknown/unassigned sequence.
+ // == GB18030_CODE -> this is the end of a GB18030 4-byte sequence.
+ // <= NODE_START -> index of the next node in our trie to process next byte.
+ // <= SEQ_START -> index of the start of a character code sequence, in decodeTableSeq.
+ this.decodeTables = [];
+ this.decodeTables[0] = UNASSIGNED_NODE.slice(0); // Create root node.
-x.rm = (cache, key) => rm.entry(cache, key)
-x.rm.all = cache => rm.all(cache)
-x.rm.entry = x.rm
-x.rm.content = (cache, hash) => rm.content(cache, hash)
+ // Sometimes a MBCS char corresponds to a sequence of unicode chars. We store them as arrays of integers here.
+ this.decodeTableSeq = [];
-x.setLocale = lang => setLocale(lang)
-x.clearMemoized = () => clearMemoized()
+ // Actual mapping tables consist of chunks. Use them to fill up decode tables.
+ for (var i = 0; i < mappingTable.length; i++)
+ this._addDecodeChunk(mappingTable[i]);
-x.tmp = {}
-x.tmp.mkdir = (cache, opts) => tmp.mkdir(cache, opts)
-x.tmp.withTmp = (cache, opts, cb) => tmp.withTmp(cache, opts, cb)
+ // Load & create GB18030 tables when needed.
+ if (typeof codecOptions.gb18030 === 'function') {
+ this.gb18030 = codecOptions.gb18030(); // Load GB18030 ranges.
-x.verify = (cache, opts) => verify(cache, opts)
-x.verify.lastRun = cache => verify.lastRun(cache)
+ // Add GB18030 common decode nodes.
+ var commonThirdByteNodeIdx = this.decodeTables.length;
+ this.decodeTables.push(UNASSIGNED_NODE.slice(0));
+ var commonFourthByteNodeIdx = this.decodeTables.length;
+ this.decodeTables.push(UNASSIGNED_NODE.slice(0));
-/***/ }),
-/* 259 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ // Fill out the tree
+ var firstByteNode = this.decodeTables[0];
+ for (var i = 0x81; i <= 0xFE; i++) {
+ var secondByteNode = this.decodeTables[NODE_START - firstByteNode[i]];
+ for (var j = 0x30; j <= 0x39; j++) {
+ if (secondByteNode[j] === UNASSIGNED) {
+ secondByteNode[j] = NODE_START - commonThirdByteNodeIdx;
+ } else if (secondByteNode[j] > NODE_START) {
+ throw new Error("gb18030 decode tables conflict at byte 2");
+ }
-"use strict";
+ var thirdByteNode = this.decodeTables[NODE_START - secondByteNode[j]];
+ for (var k = 0x81; k <= 0xFE; k++) {
+ if (thirdByteNode[k] === UNASSIGNED) {
+ thirdByteNode[k] = NODE_START - commonFourthByteNodeIdx;
+ } else if (thirdByteNode[k] === NODE_START - commonFourthByteNodeIdx) {
+ continue;
+ } else if (thirdByteNode[k] > NODE_START) {
+ throw new Error("gb18030 decode tables conflict at byte 3");
+ }
+ var fourthByteNode = this.decodeTables[NODE_START - thirdByteNode[k]];
+ for (var l = 0x30; l <= 0x39; l++) {
+ if (fourthByteNode[l] === UNASSIGNED)
+ fourthByteNode[l] = GB18030_CODE;
+ }
+ }
+ }
+ }
+ }
-const cacache = __webpack_require__(426)
-const fetch = __webpack_require__(269)
-const pipe = __webpack_require__(371).pipe
-const ssri = __webpack_require__(951)
-const through = __webpack_require__(371).through
-const to = __webpack_require__(371).to
-const url = __webpack_require__(835)
-const stream = __webpack_require__(794)
+ this.defaultCharUnicode = iconv.defaultCharUnicode;
-const MAX_MEM_SIZE = 5 * 1024 * 1024 // 5MB
+
+ // Encode tables: Unicode -> DBCS.
-function cacheKey (req) {
- const parsed = url.parse(req.url)
- return `make-fetch-happen:request-cache:${
- url.format({
- protocol: parsed.protocol,
- slashes: parsed.slashes,
- host: parsed.host,
- hostname: parsed.hostname,
- pathname: parsed.pathname
- })
- }`
+ // `encodeTable` is array mapping from unicode char to encoded char. All its values are integers for performance.
+ // Because it can be sparse, it is represented as array of buckets by 256 chars each. Bucket can be null.
+ // Values: >= 0 -> it is a normal char. Write the value (if <=256 then 1 byte, if <=65536 then 2 bytes, etc.).
+ // == UNASSIGNED -> no conversion found. Output a default char.
+ // <= SEQ_START -> it's an index in encodeTableSeq, see below. The character starts a sequence.
+ this.encodeTable = [];
+
+ // `encodeTableSeq` is used when a sequence of unicode characters is encoded as a single code. We use a tree of
+ // objects where keys correspond to characters in sequence and leafs are the encoded dbcs values. A special DEF_CHAR key
+ // means end of sequence (needed when one sequence is a strict subsequence of another).
+ // Objects are kept separately from encodeTable to increase performance.
+ this.encodeTableSeq = [];
+
+ // Some chars can be decoded, but need not be encoded.
+ var skipEncodeChars = {};
+ if (codecOptions.encodeSkipVals)
+ for (var i = 0; i < codecOptions.encodeSkipVals.length; i++) {
+ var val = codecOptions.encodeSkipVals[i];
+ if (typeof val === 'number')
+ skipEncodeChars[val] = true;
+ else
+ for (var j = val.from; j <= val.to; j++)
+ skipEncodeChars[j] = true;
+ }
+
+ // Use decode trie to recursively fill out encode tables.
+ this._fillEncodeTable(0, 0, skipEncodeChars);
+
+ // Add more encoding pairs when needed.
+ if (codecOptions.encodeAdd) {
+ for (var uChar in codecOptions.encodeAdd)
+ if (Object.prototype.hasOwnProperty.call(codecOptions.encodeAdd, uChar))
+ this._setEncodeChar(uChar.charCodeAt(0), codecOptions.encodeAdd[uChar]);
+ }
+
+ this.defCharSB = this.encodeTable[0][iconv.defaultCharSingleByte.charCodeAt(0)];
+ if (this.defCharSB === UNASSIGNED) this.defCharSB = this.encodeTable[0]['?'];
+ if (this.defCharSB === UNASSIGNED) this.defCharSB = "?".charCodeAt(0);
}
-// This is a cacache-based implementation of the Cache standard,
-// using node-fetch.
-// docs: https://developer.mozilla.org/en-US/docs/Web/API/Cache
-//
-module.exports = class Cache {
- constructor (path, opts) {
- this._path = path
- this.Promise = (opts && opts.Promise) || Promise
- }
+DBCSCodec.prototype.encoder = DBCSEncoder;
+DBCSCodec.prototype.decoder = DBCSDecoder;
- // Returns a Promise that resolves to the response associated with the first
- // matching request in the Cache object.
- match (req, opts) {
- opts = opts || {}
- const key = cacheKey(req)
- return cacache.get.info(this._path, key).then(info => {
- return info && cacache.get.hasContent(
- this._path, info.integrity, opts
- ).then(exists => exists && info)
- }).then(info => {
- if (info && info.metadata && matchDetails(req, {
- url: info.metadata.url,
- reqHeaders: new fetch.Headers(info.metadata.reqHeaders),
- resHeaders: new fetch.Headers(info.metadata.resHeaders),
- cacheIntegrity: info.integrity,
- integrity: opts && opts.integrity
- })) {
- const resHeaders = new fetch.Headers(info.metadata.resHeaders)
- addCacheHeaders(resHeaders, this._path, key, info.integrity, info.time)
- if (req.method === 'HEAD') {
- return new fetch.Response(null, {
- url: req.url,
- headers: resHeaders,
- status: 200
- })
+// Decoder helpers
+DBCSCodec.prototype._getDecodeTrieNode = function(addr) {
+ var bytes = [];
+ for (; addr > 0; addr >>>= 8)
+ bytes.push(addr & 0xFF);
+ if (bytes.length == 0)
+ bytes.push(0);
+
+ var node = this.decodeTables[0];
+ for (var i = bytes.length-1; i > 0; i--) { // Traverse nodes deeper into the trie.
+ var val = node[bytes[i]];
+
+ if (val == UNASSIGNED) { // Create new node.
+ node[bytes[i]] = NODE_START - this.decodeTables.length;
+ this.decodeTables.push(node = UNASSIGNED_NODE.slice(0));
}
- let body
- const cachePath = this._path
- // avoid opening cache file handles until a user actually tries to
- // read from it.
- if (opts.memoize !== false && info.size > MAX_MEM_SIZE) {
- body = new stream.PassThrough()
- const realRead = body._read
- body._read = function (size) {
- body._read = realRead
- pipe(
- cacache.get.stream.byDigest(cachePath, info.integrity, {
- memoize: opts.memoize
- }),
- body,
- err => body.emit(err))
- return realRead.call(this, size)
- }
- } else {
- let readOnce = false
- // cacache is much faster at bulk reads
- body = new stream.Readable({
- read () {
- if (readOnce) return this.push(null)
- readOnce = true
- cacache.get.byDigest(cachePath, info.integrity, {
- memoize: opts.memoize
- }).then(data => {
- this.push(data)
- this.push(null)
- }, err => this.emit('error', err))
- }
- })
+ else if (val <= NODE_START) { // Existing node.
+ node = this.decodeTables[NODE_START - val];
}
- return this.Promise.resolve(new fetch.Response(body, {
- url: req.url,
- headers: resHeaders,
- status: 200,
- size: info.size
- }))
- }
- })
- }
-
- // Takes both a request and its response and adds it to the given cache.
- put (req, response, opts) {
- opts = opts || {}
- const size = response.headers.get('content-length')
- const fitInMemory = !!size && opts.memoize !== false && size < MAX_MEM_SIZE
- const ckey = cacheKey(req)
- const cacheOpts = {
- algorithms: opts.algorithms,
- metadata: {
- url: req.url,
- reqHeaders: req.headers.raw(),
- resHeaders: response.headers.raw()
- },
- size,
- memoize: fitInMemory && opts.memoize
- }
- if (req.method === 'HEAD' || response.status === 304) {
- // Update metadata without writing
- return cacache.get.info(this._path, ckey).then(info => {
- // Providing these will bypass content write
- cacheOpts.integrity = info.integrity
- addCacheHeaders(
- response.headers, this._path, ckey, info.integrity, info.time
- )
- return new this.Promise((resolve, reject) => {
- pipe(
- cacache.get.stream.byDigest(this._path, info.integrity, cacheOpts),
- cacache.put.stream(this._path, cacheKey(req), cacheOpts),
- err => err ? reject(err) : resolve(response)
- )
- })
- }).then(() => response)
+ else
+ throw new Error("Overwrite byte in " + this.encodingName + ", addr: " + addr.toString(16));
}
- let buf = []
- let bufSize = 0
- let cacheTargetStream = false
- const cachePath = this._path
- let cacheStream = to((chunk, enc, cb) => {
- if (!cacheTargetStream) {
- if (fitInMemory) {
- cacheTargetStream =
- to({highWaterMark: MAX_MEM_SIZE}, (chunk, enc, cb) => {
- buf.push(chunk)
- bufSize += chunk.length
- cb()
- }, done => {
- cacache.put(
- cachePath,
- cacheKey(req),
- Buffer.concat(buf, bufSize),
- cacheOpts
- ).then(
- () => done(),
- done
- )
- })
- } else {
- cacheTargetStream =
- cacache.put.stream(cachePath, cacheKey(req), cacheOpts)
- }
- }
- cacheTargetStream.write(chunk, enc, cb)
- }, done => {
- cacheTargetStream ? cacheTargetStream.end(done) : done()
- })
- const oldBody = response.body
- const newBody = through({highWaterMark: MAX_MEM_SIZE})
- response.body = newBody
- oldBody.once('error', err => newBody.emit('error', err))
- newBody.once('error', err => oldBody.emit('error', err))
- cacheStream.once('error', err => newBody.emit('error', err))
- pipe(oldBody, to((chunk, enc, cb) => {
- cacheStream.write(chunk, enc, () => {
- newBody.write(chunk, enc, cb)
- })
- }, done => {
- cacheStream.end(() => {
- newBody.end(() => {
- done()
- })
- })
- }), err => err && newBody.emit('error', err))
- return response
- }
+ return node;
+}
- // Finds the Cache entry whose key is the request, and if found, deletes the
- // Cache entry and returns a Promise that resolves to true. If no Cache entry
- // is found, it returns false.
- 'delete' (req, opts) {
- opts = opts || {}
- if (typeof opts.memoize === 'object') {
- if (opts.memoize.reset) {
- opts.memoize.reset()
- } else if (opts.memoize.clear) {
- opts.memoize.clear()
- } else {
- Object.keys(opts.memoize).forEach(k => {
- opts.memoize[k] = null
- })
- }
+
+DBCSCodec.prototype._addDecodeChunk = function(chunk) {
+ // First element of chunk is the hex mbcs code where we start.
+ var curAddr = parseInt(chunk[0], 16);
+
+ // Choose the decoding node where we'll write our chars.
+ var writeTable = this._getDecodeTrieNode(curAddr);
+ curAddr = curAddr & 0xFF;
+
+ // Write all other elements of the chunk to the table.
+ for (var k = 1; k < chunk.length; k++) {
+ var part = chunk[k];
+ if (typeof part === "string") { // String, write as-is.
+ for (var l = 0; l < part.length;) {
+ var code = part.charCodeAt(l++);
+ if (0xD800 <= code && code < 0xDC00) { // Decode surrogate
+ var codeTrail = part.charCodeAt(l++);
+ if (0xDC00 <= codeTrail && codeTrail < 0xE000)
+ writeTable[curAddr++] = 0x10000 + (code - 0xD800) * 0x400 + (codeTrail - 0xDC00);
+ else
+ throw new Error("Incorrect surrogate pair in " + this.encodingName + " at chunk " + chunk[0]);
+ }
+ else if (0x0FF0 < code && code <= 0x0FFF) { // Character sequence (our own encoding used)
+ var len = 0xFFF - code + 2;
+ var seq = [];
+ for (var m = 0; m < len; m++)
+ seq.push(part.charCodeAt(l++)); // Simple variation: don't support surrogates or subsequences in seq.
+
+ writeTable[curAddr++] = SEQ_START - this.decodeTableSeq.length;
+ this.decodeTableSeq.push(seq);
+ }
+ else
+ writeTable[curAddr++] = code; // Basic char
+ }
+ }
+ else if (typeof part === "number") { // Integer, meaning increasing sequence starting with prev character.
+ var charCode = writeTable[curAddr - 1] + 1;
+ for (var l = 0; l < part; l++)
+ writeTable[curAddr++] = charCode++;
+ }
+ else
+ throw new Error("Incorrect type '" + typeof part + "' given in " + this.encodingName + " at chunk " + chunk[0]);
}
- return cacache.rm.entry(
- this._path,
- cacheKey(req)
- // TODO - true/false
- ).then(() => false)
- }
+ if (curAddr > 0xFF)
+ throw new Error("Incorrect chunk in " + this.encodingName + " at addr " + chunk[0] + ": too long" + curAddr);
}
-function matchDetails (req, cached) {
- const reqUrl = url.parse(req.url)
- const cacheUrl = url.parse(cached.url)
- const vary = cached.resHeaders.get('Vary')
- // https://tools.ietf.org/html/rfc7234#section-4.1
- if (vary) {
- if (vary.match(/\*/)) {
- return false
- } else {
- const fieldsMatch = vary.split(/\s*,\s*/).every(field => {
- return cached.reqHeaders.get(field) === req.headers.get(field)
- })
- if (!fieldsMatch) {
- return false
- }
- }
- }
- if (cached.integrity) {
- return ssri.parse(cached.integrity).match(cached.cacheIntegrity)
- }
- reqUrl.hash = null
- cacheUrl.hash = null
- return url.format(reqUrl) === url.format(cacheUrl)
+// Encoder helpers
+DBCSCodec.prototype._getEncodeBucket = function(uCode) {
+ var high = uCode >> 8; // This could be > 0xFF because of astral characters.
+ if (this.encodeTable[high] === undefined)
+ this.encodeTable[high] = UNASSIGNED_NODE.slice(0); // Create bucket on demand.
+ return this.encodeTable[high];
}
-function addCacheHeaders (resHeaders, path, key, hash, time) {
- resHeaders.set('X-Local-Cache', encodeURIComponent(path))
- resHeaders.set('X-Local-Cache-Key', encodeURIComponent(key))
- resHeaders.set('X-Local-Cache-Hash', encodeURIComponent(hash))
- resHeaders.set('X-Local-Cache-Time', new Date(time).toUTCString())
+DBCSCodec.prototype._setEncodeChar = function(uCode, dbcsCode) {
+ var bucket = this._getEncodeBucket(uCode);
+ var low = uCode & 0xFF;
+ if (bucket[low] <= SEQ_START)
+ this.encodeTableSeq[SEQ_START-bucket[low]][DEF_CHAR] = dbcsCode; // There's already a sequence, set a single-char subsequence of it.
+ else if (bucket[low] == UNASSIGNED)
+ bucket[low] = dbcsCode;
}
+DBCSCodec.prototype._setEncodeSequence = function(seq, dbcsCode) {
+
+ // Get the root of character tree according to first character of the sequence.
+ var uCode = seq[0];
+ var bucket = this._getEncodeBucket(uCode);
+ var low = uCode & 0xFF;
-/***/ }),
-/* 260 */
-/***/ (function(module, exports, __webpack_require__) {
+ var node;
+ if (bucket[low] <= SEQ_START) {
+ // There's already a sequence with - use it.
+ node = this.encodeTableSeq[SEQ_START-bucket[low]];
+ }
+ else {
+ // There was no sequence object - allocate a new one.
+ node = {};
+ if (bucket[low] !== UNASSIGNED) node[DEF_CHAR] = bucket[low]; // If a char was set before - make it a single-char subsequence.
+ bucket[low] = SEQ_START - this.encodeTableSeq.length;
+ this.encodeTableSeq.push(node);
+ }
-"use strict";
+ // Traverse the character tree, allocating new nodes as needed.
+ for (var j = 1; j < seq.length-1; j++) {
+ var oldVal = node[uCode];
+ if (typeof oldVal === 'object')
+ node = oldVal;
+ else {
+ node = node[uCode] = {}
+ if (oldVal !== undefined)
+ node[DEF_CHAR] = oldVal
+ }
+ }
+ // Set the leaf to given dbcsCode.
+ uCode = seq[seq.length-1];
+ node[uCode] = dbcsCode;
+}
-exports = module.exports = lifecycle
-exports.makeEnv = makeEnv
-exports._incorrectWorkingDirectory = _incorrectWorkingDirectory
+DBCSCodec.prototype._fillEncodeTable = function(nodeIdx, prefix, skipEncodeChars) {
+ var node = this.decodeTables[nodeIdx];
+ var hasValues = false;
+ var subNodeEmpty = {};
+ for (var i = 0; i < 0x100; i++) {
+ var uCode = node[i];
+ var mbCode = prefix + i;
+ if (skipEncodeChars[mbCode])
+ continue;
-// for testing
-const platform = process.env.__TESTING_FAKE_PLATFORM__ || process.platform
-const isWindows = platform === 'win32'
-const spawn = __webpack_require__(128)
-const path = __webpack_require__(622)
-const Stream = __webpack_require__(794).Stream
-const fs = __webpack_require__(598)
-const chain = __webpack_require__(433).chain
-const uidNumber = __webpack_require__(798)
-const umask = __webpack_require__(696)
-const which = __webpack_require__(142)
-const byline = __webpack_require__(861)
-const resolveFrom = __webpack_require__(484)
+ if (uCode >= 0) {
+ this._setEncodeChar(uCode, mbCode);
+ hasValues = true;
+ } else if (uCode <= NODE_START) {
+ var subNodeIdx = NODE_START - uCode;
+ if (!subNodeEmpty[subNodeIdx]) { // Skip empty subtrees (they are too large in gb18030).
+ var newPrefix = (mbCode << 8) >>> 0; // NOTE: '>>> 0' keeps 32-bit num positive.
+ if (this._fillEncodeTable(subNodeIdx, newPrefix, skipEncodeChars))
+ hasValues = true;
+ else
+ subNodeEmpty[subNodeIdx] = true;
+ }
+ } else if (uCode <= SEQ_START) {
+ this._setEncodeSequence(this.decodeTableSeq[SEQ_START - uCode], mbCode);
+ hasValues = true;
+ }
+ }
+ return hasValues;
+}
-const DEFAULT_NODE_GYP_PATH = /*require.resolve*/( 693)
-const hookStatCache = new Map()
-let PATH = isWindows ? 'Path' : 'PATH'
-exports._pathEnvName = PATH
-const delimiter = path.delimiter
-// windows calls its path 'Path' usually, but this is not guaranteed.
-// merge them all together in the order they appear in the object.
-const mergePath = env =>
- Object.keys(env).filter(p => /^path$/i.test(p) && env[p])
- .map(p => env[p].split(delimiter))
- .reduce((set, p) => set.concat(p.filter(p => !set.includes(p))), [])
- .join(delimiter)
-exports._mergePath = mergePath
+// == Encoder ==================================================================
-const setPathEnv = (env, path) => {
- // first ensure that the canonical value is set.
- env[PATH] = path
- // also set any other case values, because windows.
- Object.keys(env)
- .filter(p => p !== PATH && /^path$/i.test(p))
- .forEach(p => { env[p] = path })
+function DBCSEncoder(options, codec) {
+ // Encoder state
+ this.leadSurrogate = -1;
+ this.seqObj = undefined;
+
+ // Static data
+ this.encodeTable = codec.encodeTable;
+ this.encodeTableSeq = codec.encodeTableSeq;
+ this.defaultCharSingleByte = codec.defCharSB;
+ this.gb18030 = codec.gb18030;
}
-exports._setPathEnv = setPathEnv
-function logid (pkg, stage) {
- return pkg._id + '~' + stage + ':'
-}
+DBCSEncoder.prototype.write = function(str) {
+ var newBuf = Buffer.alloc(str.length * (this.gb18030 ? 4 : 3)),
+ leadSurrogate = this.leadSurrogate,
+ seqObj = this.seqObj, nextChar = -1,
+ i = 0, j = 0;
-function hookStat (dir, stage, cb) {
- const hook = path.join(dir, '.hooks', stage)
- const cachedStatError = hookStatCache.get(hook)
+ while (true) {
+ // 0. Get next character.
+ if (nextChar === -1) {
+ if (i == str.length) break;
+ var uCode = str.charCodeAt(i++);
+ }
+ else {
+ var uCode = nextChar;
+ nextChar = -1;
+ }
- if (cachedStatError === undefined) {
- return fs.stat(hook, function (statError) {
- hookStatCache.set(hook, statError)
- cb(statError)
- })
- }
+ // 1. Handle surrogates.
+ if (0xD800 <= uCode && uCode < 0xE000) { // Char is one of surrogates.
+ if (uCode < 0xDC00) { // We've got lead surrogate.
+ if (leadSurrogate === -1) {
+ leadSurrogate = uCode;
+ continue;
+ } else {
+ leadSurrogate = uCode;
+ // Double lead surrogate found.
+ uCode = UNASSIGNED;
+ }
+ } else { // We've got trail surrogate.
+ if (leadSurrogate !== -1) {
+ uCode = 0x10000 + (leadSurrogate - 0xD800) * 0x400 + (uCode - 0xDC00);
+ leadSurrogate = -1;
+ } else {
+ // Incomplete surrogate pair - only trail surrogate found.
+ uCode = UNASSIGNED;
+ }
+
+ }
+ }
+ else if (leadSurrogate !== -1) {
+ // Incomplete surrogate pair - only lead surrogate found.
+ nextChar = uCode; uCode = UNASSIGNED; // Write an error, then current char.
+ leadSurrogate = -1;
+ }
- return setImmediate(() => cb(cachedStatError))
-}
+ // 2. Convert uCode character.
+ var dbcsCode = UNASSIGNED;
+ if (seqObj !== undefined && uCode != UNASSIGNED) { // We are in the middle of the sequence
+ var resCode = seqObj[uCode];
+ if (typeof resCode === 'object') { // Sequence continues.
+ seqObj = resCode;
+ continue;
-function lifecycle (pkg, stage, wd, opts) {
- return new Promise((resolve, reject) => {
- while (pkg && pkg._data) pkg = pkg._data
- if (!pkg) return reject(new Error('Invalid package data'))
+ } else if (typeof resCode == 'number') { // Sequence finished. Write it.
+ dbcsCode = resCode;
- opts.log.info('lifecycle', logid(pkg, stage), pkg._id)
- if (!pkg.scripts) pkg.scripts = {}
+ } else if (resCode == undefined) { // Current character is not part of the sequence.
- if (stage === 'prepublish' && opts.ignorePrepublish) {
- opts.log.info('lifecycle', logid(pkg, stage), 'ignored because ignore-prepublish is set to true', pkg._id)
- delete pkg.scripts.prepublish
- }
+ // Try default character for this sequence
+ resCode = seqObj[DEF_CHAR];
+ if (resCode !== undefined) {
+ dbcsCode = resCode; // Found. Write it.
+ nextChar = uCode; // Current character will be written too in the next iteration.
- hookStat(opts.dir, stage, function (statError) {
- // makeEnv is a slow operation. This guard clause prevents makeEnv being called
- // and avoids a ton of unnecessary work, and results in a major perf boost.
- if (!pkg.scripts[stage] && statError) return resolve()
+ } else {
+ // TODO: What if we have no default? (resCode == undefined)
+ // Then, we should write first char of the sequence as-is and try the rest recursively.
+ // Didn't do it for now because no encoding has this situation yet.
+ // Currently, just skip the sequence and write current char.
+ }
+ }
+ seqObj = undefined;
+ }
+ else if (uCode >= 0) { // Regular character
+ var subtable = this.encodeTable[uCode >> 8];
+ if (subtable !== undefined)
+ dbcsCode = subtable[uCode & 0xFF];
+
+ if (dbcsCode <= SEQ_START) { // Sequence start
+ seqObj = this.encodeTableSeq[SEQ_START-dbcsCode];
+ continue;
+ }
- validWd(wd || path.resolve(opts.dir, pkg.name), function (er, wd) {
- if (er) return reject(er)
+ if (dbcsCode == UNASSIGNED && this.gb18030) {
+ // Use GB18030 algorithm to find character(s) to write.
+ var idx = findIdx(this.gb18030.uChars, uCode);
+ if (idx != -1) {
+ var dbcsCode = this.gb18030.gbChars[idx] + (uCode - this.gb18030.uChars[idx]);
+ newBuf[j++] = 0x81 + Math.floor(dbcsCode / 12600); dbcsCode = dbcsCode % 12600;
+ newBuf[j++] = 0x30 + Math.floor(dbcsCode / 1260); dbcsCode = dbcsCode % 1260;
+ newBuf[j++] = 0x81 + Math.floor(dbcsCode / 10); dbcsCode = dbcsCode % 10;
+ newBuf[j++] = 0x30 + dbcsCode;
+ continue;
+ }
+ }
+ }
- if ((wd.indexOf(opts.dir) !== 0 || _incorrectWorkingDirectory(wd, pkg)) &&
- !opts.unsafePerm && pkg.scripts[stage]) {
- opts.log.warn('lifecycle', logid(pkg, stage), 'cannot run in wd', pkg._id, pkg.scripts[stage], `(wd=${wd})`)
- return resolve()
+ // 3. Write dbcsCode character.
+ if (dbcsCode === UNASSIGNED)
+ dbcsCode = this.defaultCharSingleByte;
+
+ if (dbcsCode < 0x100) {
+ newBuf[j++] = dbcsCode;
+ }
+ else if (dbcsCode < 0x10000) {
+ newBuf[j++] = dbcsCode >> 8; // high byte
+ newBuf[j++] = dbcsCode & 0xFF; // low byte
}
+ else if (dbcsCode < 0x1000000) {
+ newBuf[j++] = dbcsCode >> 16;
+ newBuf[j++] = (dbcsCode >> 8) & 0xFF;
+ newBuf[j++] = dbcsCode & 0xFF;
+ } else {
+ newBuf[j++] = dbcsCode >>> 24;
+ newBuf[j++] = (dbcsCode >>> 16) & 0xFF;
+ newBuf[j++] = (dbcsCode >>> 8) & 0xFF;
+ newBuf[j++] = dbcsCode & 0xFF;
+ }
+ }
- // set the env variables, then run scripts as a child process.
- var env = makeEnv(pkg, opts)
- env.npm_lifecycle_event = stage
- env.npm_node_execpath = env.NODE = env.NODE || process.execPath
- env.npm_execpath = require.main.filename
- env.INIT_CWD = process.cwd()
- env.npm_config_node_gyp = env.npm_config_node_gyp || DEFAULT_NODE_GYP_PATH
+ this.seqObj = seqObj;
+ this.leadSurrogate = leadSurrogate;
+ return newBuf.slice(0, j);
+}
- // 'nobody' typically doesn't have permission to write to /tmp
- // even if it's never used, sh freaks out.
- if (!opts.unsafePerm) env.TMPDIR = wd
+DBCSEncoder.prototype.end = function() {
+ if (this.leadSurrogate === -1 && this.seqObj === undefined)
+ return; // All clean. Most often case.
- lifecycle_(pkg, stage, wd, opts, env, (er) => {
- if (er) return reject(er)
- return resolve()
- })
- })
- })
- })
-}
+ var newBuf = Buffer.alloc(10), j = 0;
-function _incorrectWorkingDirectory (wd, pkg) {
- return wd.lastIndexOf(pkg.name) !== wd.length - pkg.name.length
-}
+ if (this.seqObj) { // We're in the sequence.
+ var dbcsCode = this.seqObj[DEF_CHAR];
+ if (dbcsCode !== undefined) { // Write beginning of the sequence.
+ if (dbcsCode < 0x100) {
+ newBuf[j++] = dbcsCode;
+ }
+ else {
+ newBuf[j++] = dbcsCode >> 8; // high byte
+ newBuf[j++] = dbcsCode & 0xFF; // low byte
+ }
+ } else {
+ // See todo above.
+ }
+ this.seqObj = undefined;
+ }
-function lifecycle_ (pkg, stage, wd, opts, env, cb) {
- var pathArr = []
- var p = wd.split(/[\\/]node_modules[\\/]/)
- var acc = path.resolve(p.shift())
+ if (this.leadSurrogate !== -1) {
+ // Incomplete surrogate pair - only lead surrogate found.
+ newBuf[j++] = this.defaultCharSingleByte;
+ this.leadSurrogate = -1;
+ }
+
+ return newBuf.slice(0, j);
+}
- p.forEach(function (pp) {
- pathArr.unshift(path.join(acc, 'node_modules', '.bin'))
- acc = path.join(acc, 'node_modules', pp)
- })
- pathArr.unshift(path.join(acc, 'node_modules', '.bin'))
+// Export for testing
+DBCSEncoder.prototype.findIdx = findIdx;
- // we also unshift the bundled node-gyp-bin folder so that
- // the bundled one will be used for installing things.
- pathArr.unshift(path.join(__dirname, 'node-gyp-bin'))
- if (shouldPrependCurrentNodeDirToPATH(opts)) {
- // prefer current node interpreter in child scripts
- pathArr.push(path.dirname(process.execPath))
- }
+// == Decoder ==================================================================
- const existingPath = mergePath(env)
- if (existingPath) pathArr.push(existingPath)
- const envPath = pathArr.join(isWindows ? ';' : ':')
- setPathEnv(env, envPath)
+function DBCSDecoder(options, codec) {
+ // Decoder state
+ this.nodeIdx = 0;
+ this.prevBytes = [];
- var packageLifecycle = pkg.scripts && pkg.scripts.hasOwnProperty(stage)
+ // Static data
+ this.decodeTables = codec.decodeTables;
+ this.decodeTableSeq = codec.decodeTableSeq;
+ this.defaultCharUnicode = codec.defaultCharUnicode;
+ this.gb18030 = codec.gb18030;
+}
- if (opts.ignoreScripts) {
- opts.log.info('lifecycle', logid(pkg, stage), 'ignored because ignore-scripts is set to true', pkg._id)
- packageLifecycle = false
- } else if (packageLifecycle) {
- // define this here so it's available to all scripts.
- env.npm_lifecycle_script = pkg.scripts[stage]
- } else {
- opts.log.silly('lifecycle', logid(pkg, stage), 'no script for ' + stage + ', continuing')
- }
+DBCSDecoder.prototype.write = function(buf) {
+ var newBuf = Buffer.alloc(buf.length*2),
+ nodeIdx = this.nodeIdx,
+ prevBytes = this.prevBytes, prevOffset = this.prevBytes.length,
+ seqStart = -this.prevBytes.length, // idx of the start of current parsed sequence.
+ uCode;
- function done (er) {
- if (er) {
- if (opts.force) {
- opts.log.info('lifecycle', logid(pkg, stage), 'forced, continuing', er)
- er = null
- } else if (opts.failOk) {
- opts.log.warn('lifecycle', logid(pkg, stage), 'continuing anyway', er.message)
- er = null
- }
- }
- cb(er)
- }
+ for (var i = 0, j = 0; i < buf.length; i++) {
+ var curByte = (i >= 0) ? buf[i] : prevBytes[i + prevOffset];
- chain(
- [
- packageLifecycle && [runPackageLifecycle, pkg, stage, env, wd, opts],
- [runHookLifecycle, pkg, stage, env, wd, opts]
- ],
- done
- )
-}
+ // Lookup in current trie node.
+ var uCode = this.decodeTables[nodeIdx][curByte];
-function shouldPrependCurrentNodeDirToPATH (opts) {
- const cfgsetting = opts.scriptsPrependNodePath
- if (cfgsetting === false) return false
- if (cfgsetting === true) return true
+ if (uCode >= 0) {
+ // Normal character, just use it.
+ }
+ else if (uCode === UNASSIGNED) { // Unknown char.
+ // TODO: Callback with seq.
+ uCode = this.defaultCharUnicode.charCodeAt(0);
+ i = seqStart; // Skip one byte ('i' will be incremented by the for loop) and try to parse again.
+ }
+ else if (uCode === GB18030_CODE) {
+ if (i >= 3) {
+ var ptr = (buf[i-3]-0x81)*12600 + (buf[i-2]-0x30)*1260 + (buf[i-1]-0x81)*10 + (curByte-0x30);
+ } else {
+ var ptr = (prevBytes[i-3+prevOffset]-0x81)*12600 +
+ (((i-2 >= 0) ? buf[i-2] : prevBytes[i-2+prevOffset])-0x30)*1260 +
+ (((i-1 >= 0) ? buf[i-1] : prevBytes[i-1+prevOffset])-0x81)*10 +
+ (curByte-0x30);
+ }
+ var idx = findIdx(this.gb18030.gbChars, ptr);
+ uCode = this.gb18030.uChars[idx] + ptr - this.gb18030.gbChars[idx];
+ }
+ else if (uCode <= NODE_START) { // Go to next trie node.
+ nodeIdx = NODE_START - uCode;
+ continue;
+ }
+ else if (uCode <= SEQ_START) { // Output a sequence of chars.
+ var seq = this.decodeTableSeq[SEQ_START - uCode];
+ for (var k = 0; k < seq.length - 1; k++) {
+ uCode = seq[k];
+ newBuf[j++] = uCode & 0xFF;
+ newBuf[j++] = uCode >> 8;
+ }
+ uCode = seq[seq.length-1];
+ }
+ else
+ throw new Error("iconv-lite internal error: invalid decoding table value " + uCode + " at " + nodeIdx + "/" + curByte);
- var isDifferentNodeInPath
+ // Write the character to buffer, handling higher planes using surrogate pair.
+ if (uCode >= 0x10000) {
+ uCode -= 0x10000;
+ var uCodeLead = 0xD800 | (uCode >> 10);
+ newBuf[j++] = uCodeLead & 0xFF;
+ newBuf[j++] = uCodeLead >> 8;
- var foundExecPath
- try {
- foundExecPath = which.sync(path.basename(process.execPath), { pathExt: isWindows ? ';' : ':' })
- // Apply `fs.realpath()` here to avoid false positives when `node` is a symlinked executable.
- isDifferentNodeInPath = fs.realpathSync(process.execPath).toUpperCase() !==
- fs.realpathSync(foundExecPath).toUpperCase()
- } catch (e) {
- isDifferentNodeInPath = true
- }
+ uCode = 0xDC00 | (uCode & 0x3FF);
+ }
+ newBuf[j++] = uCode & 0xFF;
+ newBuf[j++] = uCode >> 8;
- if (cfgsetting === 'warn-only') {
- if (isDifferentNodeInPath && !shouldPrependCurrentNodeDirToPATH.hasWarned) {
- if (foundExecPath) {
- opts.log.warn('lifecycle', 'The node binary used for scripts is', foundExecPath, 'but npm is using', process.execPath, 'itself. Use the `--scripts-prepend-node-path` option to include the path for the node binary npm was executed with.')
- } else {
- opts.log.warn('lifecycle', 'npm is using', process.execPath, 'but there is no node binary in the current PATH. Use the `--scripts-prepend-node-path` option to include the path for the node binary npm was executed with.')
- }
- shouldPrependCurrentNodeDirToPATH.hasWarned = true
+ // Reset trie node.
+ nodeIdx = 0; seqStart = i+1;
}
- return false
- }
+ this.nodeIdx = nodeIdx;
+ this.prevBytes = (seqStart >= 0)
+ ? Array.prototype.slice.call(buf, seqStart)
+ : prevBytes.slice(seqStart + prevOffset).concat(Array.prototype.slice.call(buf));
- return isDifferentNodeInPath
+ return newBuf.slice(0, j).toString('ucs2');
}
-function validWd (d, cb) {
- fs.stat(d, function (er, st) {
- if (er || !st.isDirectory()) {
- var p = path.dirname(d)
- if (p === d) {
- return cb(new Error('Could not find suitable wd'))
- }
- return validWd(p, cb)
- }
- return cb(null, d)
- })
-}
+DBCSDecoder.prototype.end = function() {
+ var ret = '';
-function runPackageLifecycle (pkg, stage, env, wd, opts, cb) {
- // run package lifecycle scripts in the package root, or the nearest parent.
- var cmd = env.npm_lifecycle_script
+ // Try to parse all remaining chars.
+ while (this.prevBytes.length > 0) {
+ // Skip 1 character in the buffer.
+ ret += this.defaultCharUnicode;
+ var bytesArr = this.prevBytes.slice(1);
- var note = '\n> ' + pkg._id + ' ' + stage + ' ' + wd +
- '\n> ' + cmd + '\n'
- runCmd(note, cmd, pkg, env, stage, wd, opts, cb)
-}
+ // Parse remaining as usual.
+ this.prevBytes = [];
+ this.nodeIdx = 0;
+ if (bytesArr.length > 0)
+ ret += this.write(bytesArr);
+ }
-var running = false
-var queue = []
-function dequeue () {
- running = false
- if (queue.length) {
- var r = queue.shift()
- runCmd.apply(null, r)
- }
+ this.prevBytes = [];
+ this.nodeIdx = 0;
+ return ret;
}
-function runCmd (note, cmd, pkg, env, stage, wd, opts, cb) {
- if (running) {
- queue.push([note, cmd, pkg, env, stage, wd, opts, cb])
- return
- }
+// Binary search for GB18030. Returns largest i such that table[i] <= val.
+function findIdx(table, val) {
+ if (table[0] > val)
+ return -1;
- running = true
- opts.log.pause()
- var unsafe = opts.unsafePerm
- var user = unsafe ? null : opts.user
- var group = unsafe ? null : opts.group
+ var l = 0, r = table.length;
+ while (l < r-1) { // always table[l] <= val < table[r]
+ var mid = l + ((r-l+1) >> 1);
+ if (table[mid] <= val)
+ l = mid;
+ else
+ r = mid;
+ }
+ return l;
+}
- if (opts.log.level !== 'silent') {
- opts.log.clearProgress()
- console.log(note)
- opts.log.showProgress()
- }
- opts.log.verbose('lifecycle', logid(pkg, stage), 'unsafe-perm in lifecycle', unsafe)
- if (isWindows) {
- unsafe = true
- }
- if (unsafe) {
- runCmd_(cmd, pkg, env, wd, opts, stage, unsafe, 0, 0, cb)
- } else {
- uidNumber(user, group, function (er, uid, gid) {
- if (er) {
- er.code = 'EUIDLOOKUP'
- opts.log.resume()
- process.nextTick(dequeue)
- return cb(er)
- }
- runCmd_(cmd, pkg, env, wd, opts, stage, unsafe, uid, gid, cb)
- })
- }
-}
+/***/ }),
+/* 190 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-const getSpawnArgs = ({ cmd, wd, opts, uid, gid, unsafe, env }) => {
- const conf = {
- cwd: wd,
- env: env,
- stdio: opts.stdio || [ 0, 1, 2 ]
- }
+"use strict";
- if (!unsafe) {
- conf.uid = uid ^ 0
- conf.gid = gid ^ 0
- }
+var url = __webpack_require__(835)
+var gitHosts = __webpack_require__(813)
+var GitHost = module.exports = __webpack_require__(599)
- const customShell = opts.scriptShell
+var protocolToRepresentationMap = {
+ 'git+ssh:': 'sshurl',
+ 'git+https:': 'https',
+ 'ssh:': 'sshurl',
+ 'git:': 'git'
+}
- let sh = 'sh'
- let shFlag = '-c'
- if (customShell) {
- sh = customShell
- } else if (isWindows || opts._TESTING_FAKE_WINDOWS_) {
- sh = process.env.comspec || 'cmd'
- // '/d /s /c' is used only for cmd.exe.
- if (/^(?:.*\\)?cmd(?:\.exe)?$/i.test(sh)) {
- shFlag = '/d /s /c'
- conf.windowsVerbatimArguments = true
- }
- }
+function protocolToRepresentation (protocol) {
+ return protocolToRepresentationMap[protocol] || protocol.slice(0, -1)
+}
- return [sh, [shFlag, cmd], conf]
+var authProtocols = {
+ 'git:': true,
+ 'https:': true,
+ 'git+https:': true,
+ 'http:': true,
+ 'git+http:': true
}
-exports._getSpawnArgs = getSpawnArgs
+var cache = {}
-function runCmd_ (cmd, pkg, env, wd, opts, stage, unsafe, uid, gid, cb_) {
- function cb (er) {
- cb_.apply(null, arguments)
- opts.log.resume()
- process.nextTick(dequeue)
+module.exports.fromUrl = function (giturl, opts) {
+ if (typeof giturl !== 'string') return
+ var key = giturl + JSON.stringify(opts || {})
+
+ if (!(key in cache)) {
+ cache[key] = fromUrl(giturl, opts)
}
- const [sh, args, conf] = getSpawnArgs({ cmd, wd, opts, uid, gid, unsafe, env })
+ return cache[key]
+}
- opts.log.verbose('lifecycle', logid(pkg, stage), 'PATH:', env[PATH])
- opts.log.verbose('lifecycle', logid(pkg, stage), 'CWD:', wd)
- opts.log.silly('lifecycle', logid(pkg, stage), 'Args:', args)
+function fromUrl (giturl, opts) {
+ if (giturl == null || giturl === '') return
+ var url = fixupUnqualifiedGist(
+ isGitHubShorthand(giturl) ? 'github:' + giturl : giturl
+ )
+ var parsed = parseGitUrl(url)
+ var shortcutMatch = url.match(new RegExp('^([^:]+):(?:(?:[^@:]+(?:[^@]+)?@)?([^/]*))[/](.+?)(?:[.]git)?($|#)'))
+ var matches = Object.keys(gitHosts).map(function (gitHostName) {
+ try {
+ var gitHostInfo = gitHosts[gitHostName]
+ var auth = null
+ if (parsed.auth && authProtocols[parsed.protocol]) {
+ auth = parsed.auth
+ }
+ var committish = parsed.hash ? decodeURIComponent(parsed.hash.substr(1)) : null
+ var user = null
+ var project = null
+ var defaultRepresentation = null
+ if (shortcutMatch && shortcutMatch[1] === gitHostName) {
+ user = shortcutMatch[2] && decodeURIComponent(shortcutMatch[2])
+ project = decodeURIComponent(shortcutMatch[3])
+ defaultRepresentation = 'shortcut'
+ } else {
+ if (parsed.host && parsed.host !== gitHostInfo.domain && parsed.host.replace(/^www[.]/, '') !== gitHostInfo.domain) return
+ if (!gitHostInfo.protocols_re.test(parsed.protocol)) return
+ if (!parsed.path) return
+ var pathmatch = gitHostInfo.pathmatch
+ var matched = parsed.path.match(pathmatch)
+ if (!matched) return
+ /* istanbul ignore else */
+ if (matched[1] !== null && matched[1] !== undefined) {
+ user = decodeURIComponent(matched[1].replace(/^:/, ''))
+ }
+ project = decodeURIComponent(matched[2])
+ defaultRepresentation = protocolToRepresentation(parsed.protocol)
+ }
+ return new GitHost(gitHostName, user, auth, project, committish, defaultRepresentation, opts)
+ } catch (ex) {
+ /* istanbul ignore else */
+ if (ex instanceof URIError) {
+ } else throw ex
+ }
+ }).filter(function (gitHostInfo) { return gitHostInfo })
+ if (matches.length !== 1) return
+ return matches[0]
+}
- var proc = spawn(sh, args, conf, opts.log)
+function isGitHubShorthand (arg) {
+ // Note: This does not fully test the git ref format.
+ // See https://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html
+ //
+ // The only way to do this properly would be to shell out to
+ // git-check-ref-format, and as this is a fast sync function,
+ // we don't want to do that. Just let git fail if it turns
+ // out that the commit-ish is invalid.
+ // GH usernames cannot start with . or -
+ return /^[^:@%/\s.-][^:@%/\s]*[/][^:@\s/%]+(?:#.*)?$/.test(arg)
+}
- proc.on('error', procError)
- proc.on('close', function (code, signal) {
- opts.log.silly('lifecycle', logid(pkg, stage), 'Returned: code:', code, ' signal:', signal)
- if (signal) {
- process.kill(process.pid, signal)
- } else if (code) {
- var er = new Error('Exit status ' + code)
- er.errno = code
- }
- procError(er)
- })
- byline(proc.stdout).on('data', function (data) {
- opts.log.verbose('lifecycle', logid(pkg, stage), 'stdout', data.toString())
- })
- byline(proc.stderr).on('data', function (data) {
- opts.log.verbose('lifecycle', logid(pkg, stage), 'stderr', data.toString())
- })
- process.once('SIGTERM', procKill)
- process.once('SIGINT', procInterupt)
+function fixupUnqualifiedGist (giturl) {
+ // necessary for round-tripping gists
+ var parsed = url.parse(giturl)
+ if (parsed.protocol === 'gist:' && parsed.host && !parsed.path) {
+ return parsed.protocol + '/' + parsed.host
+ } else {
+ return giturl
+ }
+}
- function procError (er) {
- if (er) {
- opts.log.info('lifecycle', logid(pkg, stage), 'Failed to exec ' + stage + ' script')
- er.message = pkg._id + ' ' + stage + ': `' + cmd + '`\n' +
- er.message
- if (er.code !== 'EPERM') {
- er.code = 'ELIFECYCLE'
+function parseGitUrl (giturl) {
+ var matched = giturl.match(/^([^@]+)@([^:/]+):[/]?((?:[^/]+[/])?[^/]+?)(?:[.]git)?(#.*)?$/)
+ if (!matched) {
+ var legacy = url.parse(giturl)
+ // If we don't have url.URL, then sorry, this is just not fixable.
+ // This affects Node <= 6.12.
+ if (legacy.auth && typeof url.URL === 'function') {
+ // git urls can be in the form of scp-style/ssh-connect strings, like
+ // git+ssh://user@host.com:some/path, which the legacy url parser
+ // supports, but WhatWG url.URL class does not. However, the legacy
+ // parser de-urlencodes the username and password, so something like
+ // https://user%3An%40me:p%40ss%3Aword@x.com/ becomes
+ // https://user:n@me:p@ss:word@x.com/ which is all kinds of wrong.
+ // Pull off just the auth and host, so we dont' get the confusing
+ // scp-style URL, then pass that to the WhatWG parser to get the
+ // auth properly escaped.
+ var authmatch = giturl.match(/[^@]+@[^:/]+/)
+ /* istanbul ignore else - this should be impossible */
+ if (authmatch) {
+ var whatwg = new url.URL(authmatch[0])
+ legacy.auth = whatwg.username || ''
+ if (whatwg.password) legacy.auth += ':' + whatwg.password
}
- fs.stat(opts.dir, function (statError, d) {
- if (statError && statError.code === 'ENOENT' && opts.dir.split(path.sep).slice(-1)[0] === 'node_modules') {
- opts.log.warn('', 'Local package.json exists, but node_modules missing, did you mean to install?')
- }
- })
- er.pkgid = pkg._id
- er.stage = stage
- er.script = cmd
- er.pkgname = pkg.name
}
- process.removeListener('SIGTERM', procKill)
- process.removeListener('SIGTERM', procInterupt)
- process.removeListener('SIGINT', procKill)
- process.removeListener('SIGINT', procInterupt)
- return cb(er)
- }
- function procKill () {
- proc.kill()
+ return legacy
}
- function procInterupt () {
- proc.kill('SIGINT')
- proc.on('exit', function () {
- process.exit()
- })
- process.once('SIGINT', procKill)
+ return {
+ protocol: 'git+ssh:',
+ slashes: true,
+ auth: matched[1],
+ host: matched[2],
+ port: null,
+ hostname: matched[2],
+ hash: matched[4],
+ search: null,
+ query: null,
+ pathname: '/' + matched[3],
+ path: '/' + matched[3],
+ href: 'git+ssh://' + matched[1] + '@' + matched[2] +
+ '/' + matched[3] + (matched[4] || '')
}
}
-function runHookLifecycle (pkg, stage, env, wd, opts, cb) {
- hookStat(opts.dir, stage, function (er) {
- if (er) return cb()
- var cmd = path.join(opts.dir, '.hooks', stage)
- var note = '\n> ' + pkg._id + ' ' + stage + ' ' + wd +
- '\n> ' + cmd
- runCmd(note, cmd, pkg, env, stage, wd, opts, cb)
- })
-}
-function makeEnv (data, opts, prefix, env) {
- prefix = prefix || 'npm_package_'
- if (!env) {
- env = {}
- for (var i in process.env) {
- if (!i.match(/^npm_/)) {
- env[i] = process.env[i]
- }
- }
+/***/ }),
+/* 191 */
+/***/ (function(module) {
- // express and others respect the NODE_ENV value.
- if (opts.production) env.NODE_ENV = 'production'
- } else if (!data.hasOwnProperty('_lifecycleEnv')) {
- Object.defineProperty(data, '_lifecycleEnv',
- {
- value: env,
- enumerable: false
- }
- )
- }
+module.exports = require("querystring");
- if (opts.nodeOptions) env.NODE_OPTIONS = opts.nodeOptions
+/***/ }),
+/* 192 */,
+/* 193 */,
+/* 194 */,
+/* 195 */,
+/* 196 */,
+/* 197 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
- for (i in data) {
- if (i.charAt(0) !== '_') {
- var envKey = (prefix + i).replace(/[^a-zA-Z0-9_]/g, '_')
- if (i === 'readme') {
- continue
- }
- if (data[i] && typeof data[i] === 'object') {
- try {
- // quick and dirty detection for cyclical structures
- JSON.stringify(data[i])
- makeEnv(data[i], opts, envKey + '_', env)
- } catch (ex) {
- // usually these are package objects.
- // just get the path and basic details.
- var d = data[i]
- makeEnv(
- { name: d.name, version: d.version, path: d.path },
- opts,
- envKey + '_',
- env
- )
- }
- } else {
- env[envKey] = String(data[i])
- env[envKey] = env[envKey].indexOf('\n') !== -1
- ? JSON.stringify(env[envKey])
- : env[envKey]
- }
- }
- }
+"use strict";
- if (prefix !== 'npm_package_') return env
- prefix = 'npm_config_'
- var pkgConfig = {}
- var pkgVerConfig = {}
- var namePref = data.name + ':'
- var verPref = data.name + '@' + data.version + ':'
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
- Object.keys(opts.config).forEach(function (i) {
- // in some rare cases (e.g. working with nerf darts), there are segmented
- // "private" (underscore-prefixed) config names -- don't export
- if ((i.charAt(0) === '_' && i.indexOf('_' + namePref) !== 0) || i.match(/:_/)) {
- return
- }
- var value = opts.config[i]
- if (value instanceof Stream || Array.isArray(value) || typeof value === 'function') return
- if (i.match(/umask/)) value = umask.toString(value)
+var _validate = _interopRequireDefault(__webpack_require__(676));
- if (!value) value = ''
- else if (typeof value === 'number') value = '' + value
- else if (typeof value !== 'string') value = JSON.stringify(value)
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
- if (typeof value !== 'string') {
- return
- }
+function parse(uuid) {
+ if (!(0, _validate.default)(uuid)) {
+ throw TypeError('Invalid UUID');
+ }
- value = value.indexOf('\n') !== -1
- ? JSON.stringify(value)
- : value
- i = i.replace(/^_+/, '')
- var k
- if (i.indexOf(namePref) === 0) {
- k = i.substr(namePref.length).replace(/[^a-zA-Z0-9_]/g, '_')
- pkgConfig[k] = value
- } else if (i.indexOf(verPref) === 0) {
- k = i.substr(verPref.length).replace(/[^a-zA-Z0-9_]/g, '_')
- pkgVerConfig[k] = value
- }
- var envKey = (prefix + i).replace(/[^a-zA-Z0-9_]/g, '_')
- env[envKey] = value
- })
+ let v;
+ const arr = new Uint8Array(16); // Parse ########-....-....-....-............
- prefix = 'npm_package_config_'
- ;[pkgConfig, pkgVerConfig].forEach(function (conf) {
- for (var i in conf) {
- var envKey = (prefix + i)
- env[envKey] = conf[i]
- }
- })
+ arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
+ arr[1] = v >>> 16 & 0xff;
+ arr[2] = v >>> 8 & 0xff;
+ arr[3] = v & 0xff; // Parse ........-####-....-....-............
- return env
+ arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
+ arr[5] = v & 0xff; // Parse ........-....-####-....-............
+
+ arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
+ arr[7] = v & 0xff; // Parse ........-....-....-####-............
+
+ arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
+ arr[9] = v & 0xff; // Parse ........-....-....-....-############
+ // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
+
+ arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
+ arr[11] = v / 0x100000000 & 0xff;
+ arr[12] = v >>> 24 & 0xff;
+ arr[13] = v >>> 16 & 0xff;
+ arr[14] = v >>> 8 & 0xff;
+ arr[15] = v & 0xff;
+ return arr;
}
+var _default = parse;
+exports.default = _default;
/***/ }),
-/* 261 */,
-/* 262 */
-/***/ (function(module) {
-
-var toString = {}.toString;
+/* 198 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
-module.exports = Array.isArray || function (arr) {
- return toString.call(arr) == '[object Array]';
-};
+"use strict";
+function __export(m) {
+ for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];
+}
+Object.defineProperty(exports, "__esModule", { value: true });
+__export(__webpack_require__(359));
+//# sourceMappingURL=index.js.map
/***/ }),
-/* 263 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+/* 199 */,
+/* 200 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-Object.defineProperty(exports, '__esModule', { value: true });
+// A linked list to keep track of recently-used-ness
+const Yallist = __webpack_require__(381)
-var api = __webpack_require__(440);
-var tslib = __webpack_require__(144);
+const MAX = Symbol('max')
+const LENGTH = Symbol('length')
+const LENGTH_CALCULATOR = Symbol('lengthCalculator')
+const ALLOW_STALE = Symbol('allowStale')
+const MAX_AGE = Symbol('maxAge')
+const DISPOSE = Symbol('dispose')
+const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet')
+const LRU_LIST = Symbol('lruList')
+const CACHE = Symbol('cache')
+const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet')
-// Copyright (c) Microsoft Corporation.
-/**
- * A no-op implementation of Span that can safely be used without side-effects.
- */
-var NoOpSpan = /** @class */ (function () {
- function NoOpSpan() {
+const naiveLength = () => 1
+
+// lruList is a yallist where the head is the youngest
+// item, and the tail is the oldest. the list contains the Hit
+// objects as the entries.
+// Each Hit object has a reference to its Yallist.Node. This
+// never changes.
+//
+// cache is a Map (or PseudoMap) that matches the keys to
+// the Yallist.Node object.
+class LRUCache {
+ constructor (options) {
+ if (typeof options === 'number')
+ options = { max: options }
+
+ if (!options)
+ options = {}
+
+ if (options.max && (typeof options.max !== 'number' || options.max < 0))
+ throw new TypeError('max must be a non-negative number')
+ // Kind of weird to have a default max of Infinity, but oh well.
+ const max = this[MAX] = options.max || Infinity
+
+ const lc = options.length || naiveLength
+ this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc
+ this[ALLOW_STALE] = options.stale || false
+ if (options.maxAge && typeof options.maxAge !== 'number')
+ throw new TypeError('maxAge must be a number')
+ this[MAX_AGE] = options.maxAge || 0
+ this[DISPOSE] = options.dispose
+ this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false
+ this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false
+ this.reset()
+ }
+
+ // resize the cache when the max changes.
+ set max (mL) {
+ if (typeof mL !== 'number' || mL < 0)
+ throw new TypeError('max must be a non-negative number')
+
+ this[MAX] = mL || Infinity
+ trim(this)
+ }
+ get max () {
+ return this[MAX]
+ }
+
+ set allowStale (allowStale) {
+ this[ALLOW_STALE] = !!allowStale
+ }
+ get allowStale () {
+ return this[ALLOW_STALE]
+ }
+
+ set maxAge (mA) {
+ if (typeof mA !== 'number')
+ throw new TypeError('maxAge must be a non-negative number')
+
+ this[MAX_AGE] = mA
+ trim(this)
+ }
+ get maxAge () {
+ return this[MAX_AGE]
+ }
+
+ // resize the cache when the lengthCalculator changes.
+ set lengthCalculator (lC) {
+ if (typeof lC !== 'function')
+ lC = naiveLength
+
+ if (lC !== this[LENGTH_CALCULATOR]) {
+ this[LENGTH_CALCULATOR] = lC
+ this[LENGTH] = 0
+ this[LRU_LIST].forEach(hit => {
+ hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key)
+ this[LENGTH] += hit.length
+ })
}
- /**
- * Returns the SpanContext associated with this Span.
- */
- NoOpSpan.prototype.context = function () {
- return {
- spanId: "",
- traceId: "",
- traceFlags: api.TraceFlags.NONE
- };
- };
- /**
- * Marks the end of Span execution.
- * @param _endTime The time to use as the Span's end time. Defaults to
- * the current time.
- */
- NoOpSpan.prototype.end = function (_endTime) {
- /* Noop */
- };
- /**
- * Sets an attribute on the Span
- * @param _key the attribute key
- * @param _value the attribute value
- */
- NoOpSpan.prototype.setAttribute = function (_key, _value) {
- return this;
- };
- /**
- * Sets attributes on the Span
- * @param _attributes the attributes to add
- */
- NoOpSpan.prototype.setAttributes = function (_attributes) {
- return this;
- };
- /**
- * Adds an event to the Span
- * @param _name The name of the event
- * @param _attributes The associated attributes to add for this event
- */
- NoOpSpan.prototype.addEvent = function (_name, _attributes) {
- return this;
- };
- /**
- * Sets a status on the span. Overrides the default of CanonicalCode.OK.
- * @param _status The status to set.
- */
- NoOpSpan.prototype.setStatus = function (_status) {
- return this;
- };
- /**
- * Updates the name of the Span
- * @param _name the new Span name
- */
- NoOpSpan.prototype.updateName = function (_name) {
- return this;
- };
- /**
- * Returns whether this span will be recorded
- */
- NoOpSpan.prototype.isRecording = function () {
- return false;
- };
- return NoOpSpan;
-}());
+ trim(this)
+ }
+ get lengthCalculator () { return this[LENGTH_CALCULATOR] }
-// Copyright (c) Microsoft Corporation.
-/**
- * A no-op implementation of Tracer that can be used when tracing
- * is disabled.
- */
-var NoOpTracer = /** @class */ (function () {
- function NoOpTracer() {
+ get length () { return this[LENGTH] }
+ get itemCount () { return this[LRU_LIST].length }
+
+ rforEach (fn, thisp) {
+ thisp = thisp || this
+ for (let walker = this[LRU_LIST].tail; walker !== null;) {
+ const prev = walker.prev
+ forEachStep(this, fn, walker, thisp)
+ walker = prev
}
- /**
- * Starts a new Span.
- * @param _name The name of the span.
- * @param _options The SpanOptions used during Span creation.
- */
- NoOpTracer.prototype.startSpan = function (_name, _options) {
- return new NoOpSpan();
- };
- /**
- * Returns the current Span from the current context, if available.
- */
- NoOpTracer.prototype.getCurrentSpan = function () {
- return new NoOpSpan();
- };
- /**
- * Executes the given function within the context provided by a Span.
- * @param _span The span that provides the context.
- * @param fn The function to be executed.
- */
- NoOpTracer.prototype.withSpan = function (_span, fn) {
- return fn();
- };
- /**
- * Bind a Span as the target's scope
- * @param target An object to bind the scope.
- * @param _span A specific Span to use. Otherwise, use the current one.
- */
- NoOpTracer.prototype.bind = function (target, _span) {
- return target;
- };
- return NoOpTracer;
-}());
+ }
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-function getGlobalObject() {
- return global;
-}
+ forEach (fn, thisp) {
+ thisp = thisp || this
+ for (let walker = this[LRU_LIST].head; walker !== null;) {
+ const next = walker.next
+ forEachStep(this, fn, walker, thisp)
+ walker = next
+ }
+ }
-// Copyright (c) Microsoft Corporation.
-// V1 = OpenTelemetry 0.1
-// V2 = OpenTelemetry 0.2
-// V3 = OpenTelemetry 0.6.1
-var GLOBAL_TRACER_VERSION = 3;
-// preview5 shipped with @azure/core-tracing.tracerCache
-// and didn't have smart detection for collisions
-var GLOBAL_TRACER_SYMBOL = Symbol.for("@azure/core-tracing.tracerCache2");
-var cache;
-function loadTracerCache() {
- var globalObj = getGlobalObject();
- var existingCache = globalObj[GLOBAL_TRACER_SYMBOL];
- var setGlobalCache = true;
- if (existingCache) {
- if (existingCache.version === GLOBAL_TRACER_VERSION) {
- cache = existingCache;
- }
- else {
- setGlobalCache = false;
- if (existingCache.tracer) {
- throw new Error("Two incompatible versions of @azure/core-tracing have been loaded.\n This library is " + GLOBAL_TRACER_VERSION + ", existing is " + existingCache.version + ".");
- }
- }
+ keys () {
+ return this[LRU_LIST].toArray().map(k => k.key)
+ }
+
+ values () {
+ return this[LRU_LIST].toArray().map(k => k.value)
+ }
+
+ reset () {
+ if (this[DISPOSE] &&
+ this[LRU_LIST] &&
+ this[LRU_LIST].length) {
+ this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value))
}
- if (!cache) {
- cache = {
- tracer: undefined,
- version: GLOBAL_TRACER_VERSION
- };
+
+ this[CACHE] = new Map() // hash of items by key
+ this[LRU_LIST] = new Yallist() // list of items in order of use recency
+ this[LENGTH] = 0 // length of items in the list
+ }
+
+ dump () {
+ return this[LRU_LIST].map(hit =>
+ isStale(this, hit) ? false : {
+ k: hit.key,
+ v: hit.value,
+ e: hit.now + (hit.maxAge || 0)
+ }).toArray().filter(h => h)
+ }
+
+ dumpLru () {
+ return this[LRU_LIST]
+ }
+
+ set (key, value, maxAge) {
+ maxAge = maxAge || this[MAX_AGE]
+
+ if (maxAge && typeof maxAge !== 'number')
+ throw new TypeError('maxAge must be a number')
+
+ const now = maxAge ? Date.now() : 0
+ const len = this[LENGTH_CALCULATOR](value, key)
+
+ if (this[CACHE].has(key)) {
+ if (len > this[MAX]) {
+ del(this, this[CACHE].get(key))
+ return false
+ }
+
+ const node = this[CACHE].get(key)
+ const item = node.value
+
+ // dispose of the old one before overwriting
+ // split out into 2 ifs for better coverage tracking
+ if (this[DISPOSE]) {
+ if (!this[NO_DISPOSE_ON_SET])
+ this[DISPOSE](key, item.value)
+ }
+
+ item.now = now
+ item.maxAge = maxAge
+ item.value = value
+ this[LENGTH] += len - item.length
+ item.length = len
+ this.get(key)
+ trim(this)
+ return true
}
- if (setGlobalCache) {
- globalObj[GLOBAL_TRACER_SYMBOL] = cache;
+
+ const hit = new Entry(key, value, len, now, maxAge)
+
+ // oversized objects fall out of cache automatically.
+ if (hit.length > this[MAX]) {
+ if (this[DISPOSE])
+ this[DISPOSE](key, value)
+
+ return false
}
-}
-function getCache() {
- if (!cache) {
- loadTracerCache();
+
+ this[LENGTH] += hit.length
+ this[LRU_LIST].unshift(hit)
+ this[CACHE].set(key, this[LRU_LIST].head)
+ trim(this)
+ return true
+ }
+
+ has (key) {
+ if (!this[CACHE].has(key)) return false
+ const hit = this[CACHE].get(key).value
+ return !isStale(this, hit)
+ }
+
+ get (key) {
+ return get(this, key, true)
+ }
+
+ peek (key) {
+ return get(this, key, false)
+ }
+
+ pop () {
+ const node = this[LRU_LIST].tail
+ if (!node)
+ return null
+
+ del(this, node)
+ return node.value
+ }
+
+ del (key) {
+ del(this, this[CACHE].get(key))
+ }
+
+ load (arr) {
+ // reset the cache
+ this.reset()
+
+ const now = Date.now()
+ // A previous serialized cache has the most recent items first
+ for (let l = arr.length - 1; l >= 0; l--) {
+ const hit = arr[l]
+ const expiresAt = hit.e || 0
+ if (expiresAt === 0)
+ // the item was created without expiration in a non aged cache
+ this.set(hit.k, hit.v)
+ else {
+ const maxAge = expiresAt - now
+ // dont add already expired items
+ if (maxAge > 0) {
+ this.set(hit.k, hit.v, maxAge)
+ }
+ }
}
- return cache;
+ }
+
+ prune () {
+ this[CACHE].forEach((value, key) => get(this, key, false))
+ }
}
-// Copyright (c) Microsoft Corporation.
-var defaultTracer;
-function getDefaultTracer() {
- if (!defaultTracer) {
- defaultTracer = new NoOpTracer();
+const get = (self, key, doUse) => {
+ const node = self[CACHE].get(key)
+ if (node) {
+ const hit = node.value
+ if (isStale(self, hit)) {
+ del(self, node)
+ if (!self[ALLOW_STALE])
+ return undefined
+ } else {
+ if (doUse) {
+ if (self[UPDATE_AGE_ON_GET])
+ node.value.now = Date.now()
+ self[LRU_LIST].unshiftNode(node)
+ }
}
- return defaultTracer;
-}
-/**
- * Sets the global tracer, enabling tracing for the Azure SDK.
- * @param tracer An OpenTelemetry Tracer instance.
- */
-function setTracer(tracer) {
- var cache = getCache();
- cache.tracer = tracer;
+ return hit.value
+ }
}
-/**
- * Retrieves the active tracer, or returns a
- * no-op implementation if one is not set.
- */
-function getTracer() {
- var cache = getCache();
- if (!cache.tracer) {
- return getDefaultTracer();
- }
- return cache.tracer;
+
+const isStale = (self, hit) => {
+ if (!hit || (!hit.maxAge && !self[MAX_AGE]))
+ return false
+
+ const diff = Date.now() - hit.now
+ return hit.maxAge ? diff > hit.maxAge
+ : self[MAX_AGE] && (diff > self[MAX_AGE])
}
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-/**
- * @ignore
- * @internal
- */
-var OpenCensusTraceStateWrapper = /** @class */ (function () {
- function OpenCensusTraceStateWrapper(state) {
- this._state = state;
+const trim = self => {
+ if (self[LENGTH] > self[MAX]) {
+ for (let walker = self[LRU_LIST].tail;
+ self[LENGTH] > self[MAX] && walker !== null;) {
+ // We know that we're about to delete this one, and also
+ // what the next least recently used key will be, so just
+ // go ahead and set it now.
+ const prev = walker.prev
+ del(self, walker)
+ walker = prev
}
- OpenCensusTraceStateWrapper.prototype.get = function (_key) {
- throw new Error("Method not implemented.");
- };
- OpenCensusTraceStateWrapper.prototype.set = function (_key, _value) {
- throw new Error("Method not implemented.");
- };
- OpenCensusTraceStateWrapper.prototype.unset = function (_key) {
- throw new Error("Method not implemented");
- };
- OpenCensusTraceStateWrapper.prototype.serialize = function () {
- return this._state || "";
- };
- return OpenCensusTraceStateWrapper;
-}());
-
-// Copyright (c) Microsoft Corporation.
-function isWrappedSpan(span) {
- return !!span && span.getWrappedSpan !== undefined;
-}
-function isTracer(tracerOrSpan) {
- return tracerOrSpan.getWrappedTracer !== undefined;
+ }
}
-/**
- * An implementation of OpenTelemetry Span that wraps an OpenCensus Span.
- */
-var OpenCensusSpanWrapper = /** @class */ (function () {
- function OpenCensusSpanWrapper(tracerOrSpan, name, options) {
- if (name === void 0) { name = ""; }
- if (options === void 0) { options = {}; }
- if (isTracer(tracerOrSpan)) {
- var parent = isWrappedSpan(options.parent) ? options.parent.getWrappedSpan() : undefined;
- this._span = tracerOrSpan.getWrappedTracer().startChildSpan({
- name: name,
- childOf: parent
- });
- this._span.start();
- if (options.links) {
- for (var _i = 0, _a = options.links; _i < _a.length; _i++) {
- var link = _a[_i];
- // Since there is no way to set the link relationship, leave it as Unspecified.
- this._span.addLink(link.context.traceId, link.context.spanId, 0 /* LinkType.UNSPECIFIED */, link.attributes);
- }
- }
- }
- else {
- this._span = tracerOrSpan;
- }
- }
- /**
- * The underlying OpenCensus Span
- */
- OpenCensusSpanWrapper.prototype.getWrappedSpan = function () {
- return this._span;
- };
- /**
- * Marks the end of Span execution.
- * @param endTime The time to use as the Span's end time. Defaults to
- * the current time.
- */
- OpenCensusSpanWrapper.prototype.end = function (_endTime) {
- this._span.end();
- };
- /**
- * Returns the SpanContext associated with this Span.
- */
- OpenCensusSpanWrapper.prototype.context = function () {
- var openCensusSpanContext = this._span.spanContext;
- return {
- spanId: openCensusSpanContext.spanId,
- traceId: openCensusSpanContext.traceId,
- traceFlags: openCensusSpanContext.options,
- traceState: new OpenCensusTraceStateWrapper(openCensusSpanContext.traceState)
- };
- };
- /**
- * Sets an attribute on the Span
- * @param key the attribute key
- * @param value the attribute value
- */
- OpenCensusSpanWrapper.prototype.setAttribute = function (key, value) {
- this._span.addAttribute(key, value);
- return this;
- };
- /**
- * Sets attributes on the Span
- * @param attributes the attributes to add
- */
- OpenCensusSpanWrapper.prototype.setAttributes = function (attributes) {
- this._span.attributes = attributes;
- return this;
- };
- /**
- * Adds an event to the Span
- * @param name The name of the event
- * @param attributes The associated attributes to add for this event
- */
- OpenCensusSpanWrapper.prototype.addEvent = function (_name, _attributes) {
- throw new Error("Method not implemented.");
- };
- /**
- * Sets a status on the span. Overrides the default of CanonicalCode.OK.
- * @param status The status to set.
- */
- OpenCensusSpanWrapper.prototype.setStatus = function (status) {
- this._span.setStatus(status.code, status.message);
- return this;
- };
- /**
- * Updates the name of the Span
- * @param name the new Span name
- */
- OpenCensusSpanWrapper.prototype.updateName = function (name) {
- this._span.name = name;
- return this;
- };
- /**
- * Returns whether this span will be recorded
- */
- OpenCensusSpanWrapper.prototype.isRecording = function () {
- // NoRecordSpans have an empty traceId
- return !!this._span.traceId;
- };
- return OpenCensusSpanWrapper;
-}());
-
-// Copyright (c) Microsoft Corporation.
-/**
- * An implementation of OpenTelemetry Tracer that wraps an OpenCensus Tracer.
- */
-var OpenCensusTracerWrapper = /** @class */ (function () {
- /**
- * Create a new wrapper around a given OpenCensus Tracer.
- * @param tracer The OpenCensus Tracer to wrap.
- */
- function OpenCensusTracerWrapper(tracer) {
- this._tracer = tracer;
- }
- /**
- * The wrapped OpenCensus Tracer
- */
- OpenCensusTracerWrapper.prototype.getWrappedTracer = function () {
- return this._tracer;
- };
- /**
- * Starts a new Span.
- * @param name The name of the span.
- * @param options The SpanOptions used during Span creation.
- */
- OpenCensusTracerWrapper.prototype.startSpan = function (name, options) {
- return new OpenCensusSpanWrapper(this, name, options);
- };
- /**
- * Returns the current Span from the current context, if available.
- */
- OpenCensusTracerWrapper.prototype.getCurrentSpan = function () {
- return undefined;
- };
- /**
- * Executes the given function within the context provided by a Span.
- * @param _span The span that provides the context.
- * @param _fn The function to be executed.
- */
- OpenCensusTracerWrapper.prototype.withSpan = function (_span, _fn) {
- throw new Error("Method not implemented.");
- };
- /**
- * Bind a Span as the target's scope
- * @param target An object to bind the scope.
- * @param _span A specific Span to use. Otherwise, use the current one.
- */
- OpenCensusTracerWrapper.prototype.bind = function (_target, _span) {
- throw new Error("Method not implemented.");
- };
- return OpenCensusTracerWrapper;
-}());
-// Copyright (c) Microsoft Corporation.
-/**
- * A mock span useful for testing.
- */
-var TestSpan = /** @class */ (function (_super) {
- tslib.__extends(TestSpan, _super);
- /**
- * Starts a new Span.
- * @param parentTracer The tracer that created this Span
- * @param name The name of the span.
- * @param context The SpanContext this span belongs to
- * @param kind The SpanKind of this Span
- * @param parentSpanId The identifier of the parent Span
- * @param startTime The startTime of the event (defaults to now)
- */
- function TestSpan(parentTracer, name, context, kind, parentSpanId, startTime) {
- if (startTime === void 0) { startTime = Date.now(); }
- var _this = _super.call(this) || this;
- _this._tracer = parentTracer;
- _this.name = name;
- _this.kind = kind;
- _this.startTime = startTime;
- _this.parentSpanId = parentSpanId;
- _this.status = {
- code: api.CanonicalCode.OK
- };
- _this.endCalled = false;
- _this._context = context;
- _this.attributes = {};
- return _this;
- }
- /**
- * Returns the Tracer that created this Span
- */
- TestSpan.prototype.tracer = function () {
- return this._tracer;
- };
- /**
- * Returns the SpanContext associated with this Span.
- */
- TestSpan.prototype.context = function () {
- return this._context;
- };
- /**
- * Marks the end of Span execution.
- * @param _endTime The time to use as the Span's end time. Defaults to
- * the current time.
- */
- TestSpan.prototype.end = function (_endTime) {
- this.endCalled = true;
- };
- /**
- * Sets a status on the span. Overrides the default of CanonicalCode.OK.
- * @param status The status to set.
- */
- TestSpan.prototype.setStatus = function (status) {
- this.status = status;
- return this;
- };
- /**
- * Returns whether this span will be recorded
- */
- TestSpan.prototype.isRecording = function () {
- return true;
- };
- /**
- * Sets an attribute on the Span
- * @param key the attribute key
- * @param value the attribute value
- */
- TestSpan.prototype.setAttribute = function (key, value) {
- this.attributes[key] = value;
- return this;
- };
- /**
- * Sets attributes on the Span
- * @param attributes the attributes to add
- */
- TestSpan.prototype.setAttributes = function (attributes) {
- for (var _i = 0, _a = Object.keys(attributes); _i < _a.length; _i++) {
- var key = _a[_i];
- this.attributes[key] = attributes[key];
- }
- return this;
- };
- return TestSpan;
-}(NoOpSpan));
+const del = (self, node) => {
+ if (node) {
+ const hit = node.value
+ if (self[DISPOSE])
+ self[DISPOSE](hit.key, hit.value)
-// Copyright (c) Microsoft Corporation.
-/**
- * A mock tracer useful for testing
- */
-var TestTracer = /** @class */ (function (_super) {
- tslib.__extends(TestTracer, _super);
- function TestTracer() {
- var _this = _super !== null && _super.apply(this, arguments) || this;
- _this.traceIdCounter = 0;
- _this.spanIdCounter = 0;
- _this.rootSpans = [];
- _this.knownSpans = [];
- return _this;
- }
- TestTracer.prototype.getNextTraceId = function () {
- this.traceIdCounter++;
- return String(this.traceIdCounter);
- };
- TestTracer.prototype.getNextSpanId = function () {
- this.spanIdCounter++;
- return String(this.spanIdCounter);
- };
- /**
- * Returns all Spans that were created without a parent
- */
- TestTracer.prototype.getRootSpans = function () {
- return this.rootSpans;
- };
- /**
- * Returns all Spans this Tracer knows about
- */
- TestTracer.prototype.getKnownSpans = function () {
- return this.knownSpans;
- };
- /**
- * Returns all Spans where end() has not been called
- */
- TestTracer.prototype.getActiveSpans = function () {
- return this.knownSpans.filter(function (span) {
- return !span.endCalled;
- });
- };
- /**
- * Return all Spans for a particular trace, grouped by their
- * parent Span in a tree-like structure
- * @param traceId The traceId to return the graph for
- */
- TestTracer.prototype.getSpanGraph = function (traceId) {
- var traceSpans = this.knownSpans.filter(function (span) {
- return span.context().traceId === traceId;
- });
- var roots = [];
- var nodeMap = new Map();
- for (var _i = 0, traceSpans_1 = traceSpans; _i < traceSpans_1.length; _i++) {
- var span = traceSpans_1[_i];
- var spanId = span.context().spanId;
- var node = {
- name: span.name,
- children: []
- };
- nodeMap.set(spanId, node);
- if (span.parentSpanId) {
- var parent = nodeMap.get(span.parentSpanId);
- if (!parent) {
- throw new Error("Span with name " + node.name + " has an unknown parentSpan with id " + span.parentSpanId);
- }
- parent.children.push(node);
- }
- else {
- roots.push(node);
- }
- }
- return {
- roots: roots
- };
- };
- /**
- * Starts a new Span.
- * @param name The name of the span.
- * @param options The SpanOptions used during Span creation.
- */
- TestTracer.prototype.startSpan = function (name, options) {
- if (options === void 0) { options = {}; }
- var parentContext = this._getParentContext(options);
- var traceId;
- var isRootSpan = false;
- if (parentContext && parentContext.traceId) {
- traceId = parentContext.traceId;
- }
- else {
- traceId = this.getNextTraceId();
- isRootSpan = true;
- }
- var context = {
- traceId: traceId,
- spanId: this.getNextSpanId(),
- traceFlags: api.TraceFlags.NONE
- };
- var span = new TestSpan(this, name, context, options.kind || api.SpanKind.INTERNAL, parentContext ? parentContext.spanId : undefined, options.startTime);
- this.knownSpans.push(span);
- if (isRootSpan) {
- this.rootSpans.push(span);
- }
- return span;
- };
- TestTracer.prototype._getParentContext = function (options) {
- var parent = options.parent;
- var result;
- if (parent) {
- if ("traceId" in parent) {
- result = parent;
- }
- else {
- result = parent.context();
- }
- }
- return result;
- };
- return TestTracer;
-}(NoOpTracer));
+ self[LENGTH] -= hit.length
+ self[CACHE].delete(hit.key)
+ self[LRU_LIST].removeNode(node)
+ }
+}
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-var VERSION = "00";
-/**
- * Generates a `SpanContext` given a `traceparent` header value.
- * @param traceParent Serialized span context data as a `traceparent` header value.
- * @returns The `SpanContext` generated from the `traceparent` value.
- */
-function extractSpanContextFromTraceParentHeader(traceParentHeader) {
- var parts = traceParentHeader.split("-");
- if (parts.length !== 4) {
- return;
- }
- var version = parts[0], traceId = parts[1], spanId = parts[2], traceOptions = parts[3];
- if (version !== VERSION) {
- return;
- }
- var traceFlags = parseInt(traceOptions, 16);
- var spanContext = {
- spanId: spanId,
- traceId: traceId,
- traceFlags: traceFlags
- };
- return spanContext;
+class Entry {
+ constructor (key, value, length, now, maxAge) {
+ this.key = key
+ this.value = value
+ this.length = length
+ this.now = now
+ this.maxAge = maxAge || 0
+ }
}
-/**
- * Generates a `traceparent` value given a span context.
- * @param spanContext Contains context for a specific span.
- * @returns The `spanContext` represented as a `traceparent` value.
- */
-function getTraceParentHeader(spanContext) {
- var missingFields = [];
- if (!spanContext.traceId) {
- missingFields.push("traceId");
- }
- if (!spanContext.spanId) {
- missingFields.push("spanId");
- }
- if (missingFields.length) {
- return;
- }
- var flags = spanContext.traceFlags || 0 /* NONE */;
- var hexFlags = flags.toString(16);
- var traceFlags = hexFlags.length === 1 ? "0" + hexFlags : hexFlags;
- // https://www.w3.org/TR/trace-context/#traceparent-header-field-values
- return VERSION + "-" + spanContext.traceId + "-" + spanContext.spanId + "-" + traceFlags;
+
+const forEachStep = (self, fn, node, thisp) => {
+ let hit = node.value
+ if (isStale(self, hit)) {
+ del(self, node)
+ if (!self[ALLOW_STALE])
+ hit = undefined
+ }
+ if (hit)
+ fn.call(thisp, hit.value, hit.key, self)
}
-exports.NoOpSpan = NoOpSpan;
-exports.NoOpTracer = NoOpTracer;
-exports.OpenCensusSpanWrapper = OpenCensusSpanWrapper;
-exports.OpenCensusTracerWrapper = OpenCensusTracerWrapper;
-exports.TestSpan = TestSpan;
-exports.TestTracer = TestTracer;
-exports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader;
-exports.getTraceParentHeader = getTraceParentHeader;
-exports.getTracer = getTracer;
-exports.setTracer = setTracer;
-//# sourceMappingURL=index.js.map
+module.exports = LRUCache
/***/ }),
-/* 264 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+/* 201 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-exports.TrackerGroup = __webpack_require__(174)
-exports.Tracker = __webpack_require__(623)
-exports.TrackerStream = __webpack_require__(235)
-
-
-/***/ }),
-/* 265 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-"use strict";
+const fs = __webpack_require__(598)
+const BB = __webpack_require__(900)
+const chmod = BB.promisify(fs.chmod)
+const unlink = BB.promisify(fs.unlink)
+let move
+let pinflight
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.authenticate = exports.authWithToken = exports.authWithCredentials = void 0;
-const core = __importStar(__webpack_require__(470));
-const cli = __importStar(__webpack_require__(986));
-/**
- * Authenticate at Expo using `expo login`.
- * This step is required for publishing and building new apps.
- * It uses the `EXPO_CLI_PASSWORD` environment variable for improved security.
- */
-function authWithCredentials(username, password) {
- return __awaiter(this, void 0, void 0, function* () {
- if (!username || !password) {
- return core.info('Skipping authentication: `expo-username` and/or `expo-password` not set...');
+module.exports = moveFile
+function moveFile (src, dest) {
+ // This isn't quite an fs.rename -- the assumption is that
+ // if `dest` already exists, and we get certain errors while
+ // trying to move it, we should just not bother.
+ //
+ // In the case of cache corruption, users will receive an
+ // EINTEGRITY error elsewhere, and can remove the offending
+ // content their own way.
+ //
+ // Note that, as the name suggests, this strictly only supports file moves.
+ return BB.fromNode(cb => {
+ fs.link(src, dest, err => {
+ if (err) {
+ if (err.code === 'EEXIST' || err.code === 'EBUSY') {
+ // file already exists, so whatever
+ } else if (err.code === 'EPERM' && process.platform === 'win32') {
+ // file handle stayed open even past graceful-fs limits
+ } else {
+ return cb(err)
}
- // github actions toolkit will handle commands with `.cmd` on windows, we need that
- const bin = process.platform === 'win32' ? 'expo.cmd' : 'expo';
- yield cli.exec(bin, ['login', `--username=${username}`], {
- env: Object.assign(Object.assign({}, process.env), { EXPO_CLI_PASSWORD: password }),
- });
- });
-}
-exports.authWithCredentials = authWithCredentials;
-/**
- * Authenticate with Expo using `EXPO_TOKEN`.
- * This exports the EXPO_TOKEN environment variable for all future steps within the workflow.
- * It also double-checks if this token is valid and for what user, by running `expo whoami`.
- *
- * @see https://github.com/actions/toolkit/blob/905b2c7b0681b11056141a60055f1ba77358b7e9/packages/core/src/core.ts#L39
- */
-function authWithToken(token) {
- return __awaiter(this, void 0, void 0, function* () {
- if (!token) {
- return core.info('Skipping authentication: `expo-token` not set...');
+ }
+ return cb()
+ })
+ }).then(() => {
+ // content should never change for any reason, so make it read-only
+ return BB.join(unlink(src), process.platform !== 'win32' && chmod(dest, '0444'))
+ }).catch(() => {
+ if (!pinflight) { pinflight = __webpack_require__(399) }
+ return pinflight('cacache-move-file:' + dest, () => {
+ return BB.promisify(fs.stat)(dest).catch(err => {
+ if (err.code !== 'ENOENT') {
+ // Something else is wrong here. Bail bail bail
+ throw err
}
- // github actions toolkit will handle commands with `.cmd` on windows, we need that
- const bin = process.platform === 'win32' ? 'expo.cmd' : 'expo';
- yield cli.exec(bin, ['whoami'], {
- env: Object.assign(Object.assign({}, process.env), { EXPO_TOKEN: token }),
- });
- core.exportVariable('EXPO_TOKEN', token);
- });
-}
-exports.authWithToken = authWithToken;
-/**
- * Authenticate with Expo using either the token or username/password method.
- * If both of them are set, token has priority.
- */
-function authenticate(options) {
- if (options.token) {
- return authWithToken(options.token);
- }
- if (options.username || options.password) {
- return authWithCredentials(options.username, options.password);
- }
- core.info('Skipping authentication: `expo-token`, `expo-username`, and/or `expo-password` not set...');
- return Promise.resolve();
+ // file doesn't already exist! let's try a rename -> copy fallback
+ if (!move) { move = __webpack_require__(184) }
+ return move(src, dest, { BB, fs })
+ })
+ })
+ })
}
-exports.authenticate = authenticate;
/***/ }),
-/* 266 */
+/* 202 */
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-var os = __webpack_require__(87);
-
-function homedir() {
- var env = process.env;
- var home = env.HOME;
- var user = env.LOGNAME || env.USER || env.LNAME || env.USERNAME;
-
- if (process.platform === 'win32') {
- return env.USERPROFILE || env.HOMEDRIVE + env.HOMEPATH || home || null;
- }
- if (process.platform === 'darwin') {
- return home || (user ? '/Users/' + user : null);
- }
+const figgyPudding = __webpack_require__(965)
+const getStream = __webpack_require__(145)
+const npmFetch = __webpack_require__(789)
- if (process.platform === 'linux') {
- return home || (process.getuid() === 0 ? '/root' : (user ? '/home/' + user : null));
- }
+const SearchOpts = figgyPudding({
+ detailed: { default: false },
+ limit: { default: 20 },
+ from: { default: 0 },
+ quality: { default: 0.65 },
+ popularity: { default: 0.98 },
+ maintenance: { default: 0.5 },
+ sortBy: {}
+})
- return home || null;
+module.exports = search
+function search (query, opts) {
+ return getStream.array(search.stream(query, opts))
+}
+search.stream = searchStream
+function searchStream (query, opts) {
+ opts = SearchOpts(opts)
+ switch (opts.sortBy) {
+ case 'optimal': {
+ opts = opts.concat({
+ quality: 0.65,
+ popularity: 0.98,
+ maintenance: 0.5
+ })
+ break
+ }
+ case 'quality': {
+ opts = opts.concat({
+ quality: 1,
+ popularity: 0,
+ maintenance: 0
+ })
+ break
+ }
+ case 'popularity': {
+ opts = opts.concat({
+ quality: 0,
+ popularity: 1,
+ maintenance: 0
+ })
+ break
+ }
+ case 'maintenance': {
+ opts = opts.concat({
+ quality: 0,
+ popularity: 0,
+ maintenance: 1
+ })
+ break
+ }
+ }
+ return npmFetch.json.stream('/-/v1/search', 'objects.*',
+ opts.concat({
+ query: {
+ text: Array.isArray(query) ? query.join(' ') : query,
+ size: opts.limit,
+ from: opts.from,
+ quality: opts.quality,
+ popularity: opts.popularity,
+ maintenance: opts.maintenance
+ },
+ mapJson (obj) {
+ if (obj.package.date) {
+ obj.package.date = new Date(obj.package.date)
+ }
+ if (opts.detailed) {
+ return obj
+ } else {
+ return obj.package
+ }
+ }
+ })
+ )
}
-
-module.exports = typeof os.homedir === 'function' ? os.homedir : homedir;
/***/ }),
-/* 267 */,
-/* 268 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+/* 203 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-const assert = __webpack_require__(357)
-const Buffer = __webpack_require__(293).Buffer
-const realZlib = __webpack_require__(761)
+// this[BUFFER] is the remainder of a chunk if we're waiting for
+// the full 512 bytes of a header to come in. We will Buffer.concat()
+// it to the next write(), which is a mem copy, but a small one.
+//
+// this[QUEUE] is a Yallist of entries that haven't been emitted
+// yet this can only get filled up if the user keeps write()ing after
+// a write() returns false, or does a write() with more than one entry
+//
+// We don't buffer chunks, we always parse them and either create an
+// entry, or push it into the active entry. The ReadEntry class knows
+// to throw data away if .ignore=true
+//
+// Shift entry off the buffer when it emits 'end', and emit 'entry' for
+// the next one in the list.
+//
+// At any time, we're pushing body chunks into the entry at WRITEENTRY,
+// and waiting for 'end' on the entry at READENTRY
+//
+// ignored entries get .resume() called on them straight away
-const constants = exports.constants = __webpack_require__(60)
-const Minipass = __webpack_require__(720)
+const warner = __webpack_require__(937)
+const path = __webpack_require__(622)
+const Header = __webpack_require__(725)
+const EE = __webpack_require__(614)
+const Yallist = __webpack_require__(612)
+const maxMetaEntrySize = 1024 * 1024
+const Entry = __webpack_require__(589)
+const Pax = __webpack_require__(853)
+const zlib = __webpack_require__(268)
+const Buffer = __webpack_require__(921)
-const OriginalBufferConcat = Buffer.concat
+const gzipHeader = Buffer.from([0x1f, 0x8b])
+const STATE = Symbol('state')
+const WRITEENTRY = Symbol('writeEntry')
+const READENTRY = Symbol('readEntry')
+const NEXTENTRY = Symbol('nextEntry')
+const PROCESSENTRY = Symbol('processEntry')
+const EX = Symbol('extendedHeader')
+const GEX = Symbol('globalExtendedHeader')
+const META = Symbol('meta')
+const EMITMETA = Symbol('emitMeta')
+const BUFFER = Symbol('buffer')
+const QUEUE = Symbol('queue')
+const ENDED = Symbol('ended')
+const EMITTEDEND = Symbol('emittedEnd')
+const EMIT = Symbol('emit')
+const UNZIP = Symbol('unzip')
+const CONSUMECHUNK = Symbol('consumeChunk')
+const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
+const CONSUMEBODY = Symbol('consumeBody')
+const CONSUMEMETA = Symbol('consumeMeta')
+const CONSUMEHEADER = Symbol('consumeHeader')
+const CONSUMING = Symbol('consuming')
+const BUFFERCONCAT = Symbol('bufferConcat')
+const MAYBEEND = Symbol('maybeEnd')
+const WRITING = Symbol('writing')
+const ABORTED = Symbol('aborted')
+const DONE = Symbol('onDone')
-class ZlibError extends Error {
- constructor (err) {
- super('zlib: ' + err.message)
- this.code = err.code
- this.errno = err.errno
- /* istanbul ignore if */
- if (!this.code)
- this.code = 'ZLIB_ERROR'
+const noop = _ => true
- this.message = 'zlib: ' + err.message
- Error.captureStackTrace(this, this.constructor)
- }
+module.exports = warner(class Parser extends EE {
+ constructor (opt) {
+ opt = opt || {}
+ super(opt)
- get name () {
- return 'ZlibError'
- }
-}
+ if (opt.ondone)
+ this.on(DONE, opt.ondone)
+ else
+ this.on(DONE, _ => {
+ this.emit('prefinish')
+ this.emit('finish')
+ this.emit('end')
+ this.emit('close')
+ })
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _opts = Symbol('opts')
-const _flushFlag = Symbol('flushFlag')
-const _finishFlushFlag = Symbol('finishFlushFlag')
-const _fullFlushFlag = Symbol('fullFlushFlag')
-const _handle = Symbol('handle')
-const _onError = Symbol('onError')
-const _sawError = Symbol('sawError')
-const _level = Symbol('level')
-const _strategy = Symbol('strategy')
-const _ended = Symbol('ended')
-const _defaultFullFlush = Symbol('_defaultFullFlush')
+ this.strict = !!opt.strict
+ this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
+ this.filter = typeof opt.filter === 'function' ? opt.filter : noop
-class ZlibBase extends Minipass {
- constructor (opts, mode) {
- if (!opts || typeof opts !== 'object')
- throw new TypeError('invalid options for ZlibBase constructor')
+ // have to set this so that streams are ok piping into it
+ this.writable = true
+ this.readable = false
- super(opts)
- this[_ended] = false
- this[_opts] = opts
+ this[QUEUE] = new Yallist()
+ this[BUFFER] = null
+ this[READENTRY] = null
+ this[WRITEENTRY] = null
+ this[STATE] = 'begin'
+ this[META] = ''
+ this[EX] = null
+ this[GEX] = null
+ this[ENDED] = false
+ this[UNZIP] = null
+ this[ABORTED] = false
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+ if (typeof opt.onentry === 'function')
+ this.on('entry', opt.onentry)
+ }
- this[_flushFlag] = opts.flush
- this[_finishFlushFlag] = opts.finishFlush
- // this will throw if any options are invalid for the class selected
+ [CONSUMEHEADER] (chunk, position) {
+ let header
try {
- this[_handle] = new realZlib[mode](opts)
+ header = new Header(chunk, position, this[EX], this[GEX])
} catch (er) {
- // make sure that all errors get decorated properly
- throw new ZlibError(er)
+ return this.warn('invalid entry', er)
}
- this[_onError] = (err) => {
- this[_sawError] = true
- // there is no way to cleanly recover.
- // continuing only obscures problems.
- this.close()
- this.emit('error', err)
- }
+ if (header.nullBlock)
+ this[EMIT]('nullBlock')
+ else if (!header.cksumValid)
+ this.warn('invalid entry', header)
+ else if (!header.path)
+ this.warn('invalid: path is required', header)
+ else {
+ const type = header.type
+ if (/^(Symbolic)?Link$/.test(type) && !header.linkpath)
+ this.warn('invalid: linkpath required', header)
+ else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath)
+ this.warn('invalid: linkpath forbidden', header)
+ else {
+ const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
- this[_handle].on('error', er => this[_onError](new ZlibError(er)))
- this.once('end', () => this.close)
- }
+ if (entry.meta) {
+ if (entry.size > this.maxMetaEntrySize) {
+ entry.ignore = true
+ this[EMIT]('ignoredEntry', entry)
+ this[STATE] = 'ignore'
+ } else if (entry.size > 0) {
+ this[META] = ''
+ entry.on('data', c => this[META] += c)
+ this[STATE] = 'meta'
+ }
+ } else {
- close () {
- if (this[_handle]) {
- this[_handle].close()
- this[_handle] = null
- this.emit('close')
+ this[EX] = null
+ entry.ignore = entry.ignore || !this.filter(entry.path, entry)
+ if (entry.ignore) {
+ this[EMIT]('ignoredEntry', entry)
+ this[STATE] = entry.remain ? 'ignore' : 'begin'
+ } else {
+ if (entry.remain)
+ this[STATE] = 'body'
+ else {
+ this[STATE] = 'begin'
+ entry.end()
+ }
+
+ if (!this[READENTRY]) {
+ this[QUEUE].push(entry)
+ this[NEXTENTRY]()
+ } else
+ this[QUEUE].push(entry)
+ }
+ }
+ }
}
}
- reset () {
- if (!this[_sawError]) {
- assert(this[_handle], 'zlib binding closed')
- return this[_handle].reset()
+ [PROCESSENTRY] (entry) {
+ let go = true
+
+ if (!entry) {
+ this[READENTRY] = null
+ go = false
+ } else if (Array.isArray(entry))
+ this.emit.apply(this, entry)
+ else {
+ this[READENTRY] = entry
+ this.emit('entry', entry)
+ if (!entry.emittedEnd) {
+ entry.on('end', _ => this[NEXTENTRY]())
+ go = false
+ }
}
+
+ return go
}
- flush (flushFlag) {
- if (this.ended)
- return
+ [NEXTENTRY] () {
+ do {} while (this[PROCESSENTRY](this[QUEUE].shift()))
- if (typeof flushFlag !== 'number')
- flushFlag = this[_fullFlushFlag]
- this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }))
+ if (!this[QUEUE].length) {
+ // At this point, there's nothing in the queue, but we may have an
+ // entry which is being consumed (readEntry).
+ // If we don't, then we definitely can handle more data.
+ // If we do, and either it's flowing, or it has never had any data
+ // written to it, then it needs more.
+ // The only other possibility is that it has returned false from a
+ // write() call, so we wait for the next drain to continue.
+ const re = this[READENTRY]
+ const drainNow = !re || re.flowing || re.size === re.remain
+ if (drainNow) {
+ if (!this[WRITING])
+ this.emit('drain')
+ } else
+ re.once('drain', _ => this.emit('drain'))
+ }
}
- end (chunk, encoding, cb) {
- if (chunk)
- this.write(chunk, encoding)
- this.flush(this[_finishFlushFlag])
- this[_ended] = true
- return super.end(null, null, cb)
+ [CONSUMEBODY] (chunk, position) {
+ // write up to but no more than writeEntry.blockRemain
+ const entry = this[WRITEENTRY]
+ const br = entry.blockRemain
+ const c = (br >= chunk.length && position === 0) ? chunk
+ : chunk.slice(position, position + br)
+
+ entry.write(c)
+
+ if (!entry.blockRemain) {
+ this[STATE] = 'begin'
+ this[WRITEENTRY] = null
+ entry.end()
+ }
+
+ return c.length
}
- get ended () {
- return this[_ended]
+ [CONSUMEMETA] (chunk, position) {
+ const entry = this[WRITEENTRY]
+ const ret = this[CONSUMEBODY](chunk, position)
+
+ // if we finished, then the entry is reset
+ if (!this[WRITEENTRY])
+ this[EMITMETA](entry)
+
+ return ret
}
- write (chunk, encoding, cb) {
- // process the chunk using the sync process
- // then super.write() all the outputted chunks
- if (typeof encoding === 'function')
- cb = encoding, encoding = 'utf8'
+ [EMIT] (ev, data, extra) {
+ if (!this[QUEUE].length && !this[READENTRY])
+ this.emit(ev, data, extra)
+ else
+ this[QUEUE].push([ev, data, extra])
+ }
- if (typeof chunk === 'string')
- chunk = Buffer.from(chunk, encoding)
+ [EMITMETA] (entry) {
+ this[EMIT]('meta', this[META])
+ switch (entry.type) {
+ case 'ExtendedHeader':
+ case 'OldExtendedHeader':
+ this[EX] = Pax.parse(this[META], this[EX], false)
+ break
- if (this[_sawError])
- return
- assert(this[_handle], 'zlib binding closed')
+ case 'GlobalExtendedHeader':
+ this[GEX] = Pax.parse(this[META], this[GEX], true)
+ break
- // _processChunk tries to .close() the native handle after it's done, so we
- // intercept that by temporarily making it a no-op.
- const nativeHandle = this[_handle]._handle
- const originalNativeClose = nativeHandle.close
- nativeHandle.close = () => {}
- const originalClose = this[_handle].close
- this[_handle].close = () => {}
- // It also calls `Buffer.concat()` at the end, which may be convenient
- // for some, but which we are not interested in as it slows us down.
- Buffer.concat = (args) => args
- let result
- try {
- const flushFlag = typeof chunk[_flushFlag] === 'number'
- ? chunk[_flushFlag] : this[_flushFlag]
- result = this[_handle]._processChunk(chunk, flushFlag)
- // if we don't throw, reset it back how it was
- Buffer.concat = OriginalBufferConcat
- } catch (err) {
- // or if we do, put Buffer.concat() back before we emit error
- // Error events call into user code, which may call Buffer.concat()
- Buffer.concat = OriginalBufferConcat
- this[_onError](new ZlibError(err))
- } finally {
- if (this[_handle]) {
- // Core zlib resets `_handle` to null after attempting to close the
- // native handle. Our no-op handler prevented actual closure, but we
- // need to restore the `._handle` property.
- this[_handle]._handle = nativeHandle
- nativeHandle.close = originalNativeClose
- this[_handle].close = originalClose
- // `_processChunk()` adds an 'error' listener. If we don't remove it
- // after each call, these handlers start piling up.
- this[_handle].removeAllListeners('error')
- }
- }
+ case 'NextFileHasLongPath':
+ case 'OldGnuLongPath':
+ this[EX] = this[EX] || Object.create(null)
+ this[EX].path = this[META].replace(/\0.*/, '')
+ break
- let writeReturn
- if (result) {
- if (Array.isArray(result) && result.length > 0) {
- // The first buffer is always `handle._outBuffer`, which would be
- // re-used for later invocations; so, we always have to copy that one.
- writeReturn = super.write(Buffer.from(result[0]))
- for (let i = 1; i < result.length; i++) {
- writeReturn = super.write(result[i])
- }
- } else {
- writeReturn = super.write(Buffer.from(result))
- }
- }
+ case 'NextFileHasLongLinkpath':
+ this[EX] = this[EX] || Object.create(null)
+ this[EX].linkpath = this[META].replace(/\0.*/, '')
+ break
- if (cb)
- cb()
- return writeReturn
+ /* istanbul ignore next */
+ default: throw new Error('unknown meta: ' + entry.type)
+ }
}
-}
-
-class Zlib extends ZlibBase {
- constructor (opts, mode) {
- opts = opts || {}
-
- opts.flush = opts.flush || constants.Z_NO_FLUSH
- opts.finishFlush = opts.finishFlush || constants.Z_FINISH
- super(opts, mode)
- this[_fullFlushFlag] = constants.Z_FULL_FLUSH
- this[_level] = opts.level
- this[_strategy] = opts.strategy
+ abort (msg, error) {
+ this[ABORTED] = true
+ this.warn(msg, error)
+ this.emit('abort', error)
+ this.emit('error', error)
}
- params (level, strategy) {
- if (this[_sawError])
+ write (chunk) {
+ if (this[ABORTED])
return
- if (!this[_handle])
- throw new Error('cannot switch params when binding is closed')
-
- // no way to test this without also not supporting params at all
- /* istanbul ignore if */
- if (!this[_handle].params)
- throw new Error('not supported in this implementation')
-
- if (this[_level] !== level || this[_strategy] !== strategy) {
- this.flush(constants.Z_SYNC_FLUSH)
- assert(this[_handle], 'zlib binding closed')
- // .params() calls .flush(), but the latter is always async in the
- // core zlib. We override .flush() temporarily to intercept that and
- // flush synchronously.
- const origFlush = this[_handle].flush
- this[_handle].flush = (flushFlag, cb) => {
- this.flush(flushFlag)
- cb()
+ // first write, might be gzipped
+ if (this[UNZIP] === null && chunk) {
+ if (this[BUFFER]) {
+ chunk = Buffer.concat([this[BUFFER], chunk])
+ this[BUFFER] = null
}
- try {
- this[_handle].params(level, strategy)
- } finally {
- this[_handle].flush = origFlush
+ if (chunk.length < gzipHeader.length) {
+ this[BUFFER] = chunk
+ return true
}
- /* istanbul ignore else */
- if (this[_handle]) {
- this[_level] = level
- this[_strategy] = strategy
+ for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
+ if (chunk[i] !== gzipHeader[i])
+ this[UNZIP] = false
+ }
+ if (this[UNZIP] === null) {
+ const ended = this[ENDED]
+ this[ENDED] = false
+ this[UNZIP] = new zlib.Unzip()
+ this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
+ this[UNZIP].on('error', er =>
+ this.abort(er.message, er))
+ this[UNZIP].on('end', _ => {
+ this[ENDED] = true
+ this[CONSUMECHUNK]()
+ })
+ this[WRITING] = true
+ const ret = this[UNZIP][ended ? 'end' : 'write' ](chunk)
+ this[WRITING] = false
+ return ret
}
}
- }
-}
-// minimal 2-byte header
-class Deflate extends Zlib {
- constructor (opts) {
- super(opts, 'Deflate')
- }
-}
+ this[WRITING] = true
+ if (this[UNZIP])
+ this[UNZIP].write(chunk)
+ else
+ this[CONSUMECHUNK](chunk)
+ this[WRITING] = false
-class Inflate extends Zlib {
- constructor (opts) {
- super(opts, 'Inflate')
- }
-}
+ // return false if there's a queue, or if the current entry isn't flowing
+ const ret =
+ this[QUEUE].length ? false :
+ this[READENTRY] ? this[READENTRY].flowing :
+ true
-// gzip - bigger header, same deflate compression
-class Gzip extends Zlib {
- constructor (opts) {
- super(opts, 'Gzip')
- }
-}
+ // if we have no queue, then that means a clogged READENTRY
+ if (!ret && !this[QUEUE].length)
+ this[READENTRY].once('drain', _ => this.emit('drain'))
-class Gunzip extends Zlib {
- constructor (opts) {
- super(opts, 'Gunzip')
+ return ret
}
-}
-// raw - no header
-class DeflateRaw extends Zlib {
- constructor (opts) {
- super(opts, 'DeflateRaw')
+ [BUFFERCONCAT] (c) {
+ if (c && !this[ABORTED])
+ this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
}
-}
-class InflateRaw extends Zlib {
- constructor (opts) {
- super(opts, 'InflateRaw')
+ [MAYBEEND] () {
+ if (this[ENDED] &&
+ !this[EMITTEDEND] &&
+ !this[ABORTED] &&
+ !this[CONSUMING]) {
+ this[EMITTEDEND] = true
+ const entry = this[WRITEENTRY]
+ if (entry && entry.blockRemain) {
+ const have = this[BUFFER] ? this[BUFFER].length : 0
+ this.warn('Truncated input (needed ' + entry.blockRemain +
+ ' more bytes, only ' + have + ' available)', entry)
+ if (this[BUFFER])
+ entry.write(this[BUFFER])
+ entry.end()
+ }
+ this[EMIT](DONE)
+ }
}
-}
-// auto-detect header.
-class Unzip extends Zlib {
- constructor (opts) {
- super(opts, 'Unzip')
- }
-}
+ [CONSUMECHUNK] (chunk) {
+ if (this[CONSUMING]) {
+ this[BUFFERCONCAT](chunk)
+ } else if (!chunk && !this[BUFFER]) {
+ this[MAYBEEND]()
+ } else {
+ this[CONSUMING] = true
+ if (this[BUFFER]) {
+ this[BUFFERCONCAT](chunk)
+ const c = this[BUFFER]
+ this[BUFFER] = null
+ this[CONSUMECHUNKSUB](c)
+ } else {
+ this[CONSUMECHUNKSUB](chunk)
+ }
-class Brotli extends ZlibBase {
- constructor (opts, mode) {
- opts = opts || {}
+ while (this[BUFFER] && this[BUFFER].length >= 512 && !this[ABORTED]) {
+ const c = this[BUFFER]
+ this[BUFFER] = null
+ this[CONSUMECHUNKSUB](c)
+ }
+ this[CONSUMING] = false
+ }
- opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS
- opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH
+ if (!this[BUFFER] || this[ENDED])
+ this[MAYBEEND]()
+ }
- super(opts, mode)
+ [CONSUMECHUNKSUB] (chunk) {
+ // we know that we are in CONSUMING mode, so anything written goes into
+ // the buffer. Advance the position and put any remainder in the buffer.
+ let position = 0
+ let length = chunk.length
+ while (position + 512 <= length && !this[ABORTED]) {
+ switch (this[STATE]) {
+ case 'begin':
+ this[CONSUMEHEADER](chunk, position)
+ position += 512
+ break
- this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH
- }
-}
+ case 'ignore':
+ case 'body':
+ position += this[CONSUMEBODY](chunk, position)
+ break
-class BrotliCompress extends Brotli {
- constructor (opts) {
- super(opts, 'BrotliCompress')
- }
-}
+ case 'meta':
+ position += this[CONSUMEMETA](chunk, position)
+ break
-class BrotliDecompress extends Brotli {
- constructor (opts) {
- super(opts, 'BrotliDecompress')
+ /* istanbul ignore next */
+ default:
+ throw new Error('invalid state: ' + this[STATE])
+ }
+ }
+
+ if (position < length) {
+ if (this[BUFFER])
+ this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
+ else
+ this[BUFFER] = chunk.slice(position)
+ }
}
-}
-exports.Deflate = Deflate
-exports.Inflate = Inflate
-exports.Gzip = Gzip
-exports.Gunzip = Gunzip
-exports.DeflateRaw = DeflateRaw
-exports.InflateRaw = InflateRaw
-exports.Unzip = Unzip
-/* istanbul ignore else */
-if (typeof realZlib.BrotliCompress === 'function') {
- exports.BrotliCompress = BrotliCompress
- exports.BrotliDecompress = BrotliDecompress
-} else {
- exports.BrotliCompress = exports.BrotliDecompress = class {
- constructor () {
- throw new Error('Brotli is not supported in this version of Node.js')
+ end (chunk) {
+ if (!this[ABORTED]) {
+ if (this[UNZIP])
+ this[UNZIP].end(chunk)
+ else {
+ this[ENDED] = true
+ this.write(chunk)
+ }
}
}
-}
+})
/***/ }),
-/* 269 */
-/***/ (function(module, exports, __webpack_require__) {
+/* 204 */,
+/* 205 */,
+/* 206 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-"use strict";
+const debug = __webpack_require__(548)
+const { MAX_LENGTH, MAX_SAFE_INTEGER } = __webpack_require__(181)
+const { re, t } = __webpack_require__(328)
+const parseOptions = __webpack_require__(143)
+const { compareIdentifiers } = __webpack_require__(760)
+class SemVer {
+ constructor (version, options) {
+ options = parseOptions(options)
-/**
- * index.js
- *
- * a request API compatible with window.fetch
- */
+ if (version instanceof SemVer) {
+ if (version.loose === !!options.loose &&
+ version.includePrerelease === !!options.includePrerelease) {
+ return version
+ } else {
+ version = version.version
+ }
+ } else if (typeof version !== 'string') {
+ throw new TypeError(`Invalid Version: ${version}`)
+ }
-const url = __webpack_require__(835)
-const http = __webpack_require__(605)
-const https = __webpack_require__(211)
-const zlib = __webpack_require__(761)
-const PassThrough = __webpack_require__(794).PassThrough
+ if (version.length > MAX_LENGTH) {
+ throw new TypeError(
+ `version is longer than ${MAX_LENGTH} characters`
+ )
+ }
-const Body = __webpack_require__(542)
-const writeToStream = Body.writeToStream
-const Response = __webpack_require__(901)
-const Headers = __webpack_require__(68)
-const Request = __webpack_require__(988)
-const getNodeRequestOptions = Request.getNodeRequestOptions
-const FetchError = __webpack_require__(888)
-const isURL = /^https?:/
+ debug('SemVer', version, options)
+ this.options = options
+ this.loose = !!options.loose
+ // this isn't actually relevant for versions, but keep it so that we
+ // don't run into trouble passing this.options around.
+ this.includePrerelease = !!options.includePrerelease
-/**
- * Fetch function
- *
- * @param Mixed url Absolute url or Request instance
- * @param Object opts Fetch options
- * @return Promise
- */
-exports = module.exports = fetch
-function fetch (uri, opts) {
- // allow custom promise
- if (!fetch.Promise) {
- throw new Error('native promise missing, set fetch.Promise to your favorite alternative')
- }
+ const m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])
- Body.Promise = fetch.Promise
+ if (!m) {
+ throw new TypeError(`Invalid Version: ${version}`)
+ }
- // wrap http.request into fetch
- return new fetch.Promise((resolve, reject) => {
- // build request object
- const request = new Request(uri, opts)
- const options = getNodeRequestOptions(request)
+ this.raw = version
- const send = (options.protocol === 'https:' ? https : http).request
+ // these are actually numbers
+ this.major = +m[1]
+ this.minor = +m[2]
+ this.patch = +m[3]
- // http.request only support string as host header, this hack make custom host header possible
- if (options.headers.host) {
- options.headers.host = options.headers.host[0]
+ if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
+ throw new TypeError('Invalid major version')
}
- // send request
- const req = send(options)
- let reqTimeout
+ if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
+ throw new TypeError('Invalid minor version')
+ }
- if (request.timeout) {
- req.once('socket', socket => {
- reqTimeout = setTimeout(() => {
- req.abort()
- reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'))
- }, request.timeout)
+ if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
+ throw new TypeError('Invalid patch version')
+ }
+
+ // numberify any prerelease numeric ids
+ if (!m[4]) {
+ this.prerelease = []
+ } else {
+ this.prerelease = m[4].split('.').map((id) => {
+ if (/^[0-9]+$/.test(id)) {
+ const num = +id
+ if (num >= 0 && num < MAX_SAFE_INTEGER) {
+ return num
+ }
+ }
+ return id
})
}
- req.on('error', err => {
- clearTimeout(reqTimeout)
- reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err))
- })
+ this.build = m[5] ? m[5].split('.') : []
+ this.format()
+ }
- req.on('response', res => {
- clearTimeout(reqTimeout)
+ format () {
+ this.version = `${this.major}.${this.minor}.${this.patch}`
+ if (this.prerelease.length) {
+ this.version += `-${this.prerelease.join('.')}`
+ }
+ return this.version
+ }
- // handle redirect
- if (fetch.isRedirect(res.statusCode) && request.redirect !== 'manual') {
- if (request.redirect === 'error') {
- reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect'))
- return
- }
+ toString () {
+ return this.version
+ }
- if (request.counter >= request.follow) {
- reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'))
- return
- }
+ compare (other) {
+ debug('SemVer.compare', this.version, this.options, other)
+ if (!(other instanceof SemVer)) {
+ if (typeof other === 'string' && other === this.version) {
+ return 0
+ }
+ other = new SemVer(other, this.options)
+ }
- if (!res.headers.location) {
- reject(new FetchError(`redirect location header missing at: ${request.url}`, 'invalid-redirect'))
- return
- }
+ if (other.version === this.version) {
+ return 0
+ }
- // Comment and logic below is used under the following license:
- // Copyright (c) 2010-2012 Mikeal Rogers
- // Licensed under the Apache License, Version 2.0 (the "License");
- // you may not use this file except in compliance with the License.
- // You may obtain a copy of the License at
- // http://www.apache.org/licenses/LICENSE-2.0
- // Unless required by applicable law or agreed to in writing,
- // software distributed under the License is distributed on an "AS
- // IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- // express or implied. See the License for the specific language
- // governing permissions and limitations under the License.
+ return this.compareMain(other) || this.comparePre(other)
+ }
- // Remove authorization if changing hostnames (but not if just
- // changing ports or protocols). This matches the behavior of request:
- // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
- const resolvedUrl = url.resolve(request.url, res.headers.location)
- let redirectURL = ''
- if (!isURL.test(res.headers.location)) {
- redirectURL = url.parse(resolvedUrl)
- } else {
- redirectURL = url.parse(res.headers.location)
- }
- if (url.parse(request.url).hostname !== redirectURL.hostname) {
- request.headers.delete('authorization')
- }
+ compareMain (other) {
+ if (!(other instanceof SemVer)) {
+ other = new SemVer(other, this.options)
+ }
- // per fetch spec, for POST request with 301/302 response, or any request with 303 response, use GET when following redirect
- if (res.statusCode === 303 ||
- ((res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST')) {
- request.method = 'GET'
- request.body = null
- request.headers.delete('content-length')
- }
+ return (
+ compareIdentifiers(this.major, other.major) ||
+ compareIdentifiers(this.minor, other.minor) ||
+ compareIdentifiers(this.patch, other.patch)
+ )
+ }
- request.counter++
+ comparePre (other) {
+ if (!(other instanceof SemVer)) {
+ other = new SemVer(other, this.options)
+ }
- resolve(fetch(resolvedUrl, request))
- return
+ // NOT having a prerelease is > having one
+ if (this.prerelease.length && !other.prerelease.length) {
+ return -1
+ } else if (!this.prerelease.length && other.prerelease.length) {
+ return 1
+ } else if (!this.prerelease.length && !other.prerelease.length) {
+ return 0
+ }
+
+ let i = 0
+ do {
+ const a = this.prerelease[i]
+ const b = other.prerelease[i]
+ debug('prerelease compare', i, a, b)
+ if (a === undefined && b === undefined) {
+ return 0
+ } else if (b === undefined) {
+ return 1
+ } else if (a === undefined) {
+ return -1
+ } else if (a === b) {
+ continue
+ } else {
+ return compareIdentifiers(a, b)
}
+ } while (++i)
+ }
- // normalize location header for manual redirect mode
- const headers = new Headers()
- for (const name of Object.keys(res.headers)) {
- if (Array.isArray(res.headers[name])) {
- for (const val of res.headers[name]) {
- headers.append(name, val)
- }
+ compareBuild (other) {
+ if (!(other instanceof SemVer)) {
+ other = new SemVer(other, this.options)
+ }
+
+ let i = 0
+ do {
+ const a = this.build[i]
+ const b = other.build[i]
+ debug('prerelease compare', i, a, b)
+ if (a === undefined && b === undefined) {
+ return 0
+ } else if (b === undefined) {
+ return 1
+ } else if (a === undefined) {
+ return -1
+ } else if (a === b) {
+ continue
+ } else {
+ return compareIdentifiers(a, b)
+ }
+ } while (++i)
+ }
+
+ // preminor will bump the version up to the next minor release, and immediately
+ // down to pre-release. premajor and prepatch work the same way.
+ inc (release, identifier) {
+ switch (release) {
+ case 'premajor':
+ this.prerelease.length = 0
+ this.patch = 0
+ this.minor = 0
+ this.major++
+ this.inc('pre', identifier)
+ break
+ case 'preminor':
+ this.prerelease.length = 0
+ this.patch = 0
+ this.minor++
+ this.inc('pre', identifier)
+ break
+ case 'prepatch':
+ // If this is already a prerelease, it will bump to the next version
+ // drop any prereleases that might already exist, since they are not
+ // relevant at this point.
+ this.prerelease.length = 0
+ this.inc('patch', identifier)
+ this.inc('pre', identifier)
+ break
+ // If the input is a non-prerelease version, this acts the same as
+ // prepatch.
+ case 'prerelease':
+ if (this.prerelease.length === 0) {
+ this.inc('patch', identifier)
+ }
+ this.inc('pre', identifier)
+ break
+
+ case 'major':
+ // If this is a pre-major version, bump up to the same major version.
+ // Otherwise increment major.
+ // 1.0.0-5 bumps to 1.0.0
+ // 1.1.0 bumps to 2.0.0
+ if (
+ this.minor !== 0 ||
+ this.patch !== 0 ||
+ this.prerelease.length === 0
+ ) {
+ this.major++
+ }
+ this.minor = 0
+ this.patch = 0
+ this.prerelease = []
+ break
+ case 'minor':
+ // If this is a pre-minor version, bump up to the same minor version.
+ // Otherwise increment minor.
+ // 1.2.0-5 bumps to 1.2.0
+ // 1.2.1 bumps to 1.3.0
+ if (this.patch !== 0 || this.prerelease.length === 0) {
+ this.minor++
+ }
+ this.patch = 0
+ this.prerelease = []
+ break
+ case 'patch':
+ // If this is not a pre-release version, it will increment the patch.
+ // If it is a pre-release it will bump up to the same patch version.
+ // 1.2.0-5 patches to 1.2.0
+ // 1.2.0 patches to 1.2.1
+ if (this.prerelease.length === 0) {
+ this.patch++
+ }
+ this.prerelease = []
+ break
+ // This probably shouldn't be used publicly.
+ // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction.
+ case 'pre':
+ if (this.prerelease.length === 0) {
+ this.prerelease = [0]
} else {
- headers.append(name, res.headers[name])
+ let i = this.prerelease.length
+ while (--i >= 0) {
+ if (typeof this.prerelease[i] === 'number') {
+ this.prerelease[i]++
+ i = -2
+ }
+ }
+ if (i === -1) {
+ // didn't increment anything
+ this.prerelease.push(0)
+ }
}
- }
- if (request.redirect === 'manual' && headers.has('location')) {
- headers.set('location', url.resolve(request.url, headers.get('location')))
- }
+ if (identifier) {
+ // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
+ // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
+ if (this.prerelease[0] === identifier) {
+ if (isNaN(this.prerelease[1])) {
+ this.prerelease = [identifier, 0]
+ }
+ } else {
+ this.prerelease = [identifier, 0]
+ }
+ }
+ break
- // prepare response
- let body = res.pipe(new PassThrough())
- const responseOptions = {
- url: request.url,
- status: res.statusCode,
- statusText: res.statusMessage,
- headers: headers,
- size: request.size,
- timeout: request.timeout
- }
+ default:
+ throw new Error(`invalid increment argument: ${release}`)
+ }
+ this.format()
+ this.raw = this.version
+ return this
+ }
+}
- // HTTP-network fetch step 16.1.2
- const codings = headers.get('Content-Encoding')
+module.exports = SemVer
- // HTTP-network fetch step 16.1.3: handle content codings
- // in following scenarios we ignore compression support
- // 1. compression support is disabled
- // 2. HEAD request
- // 3. no Content-Encoding header
- // 4. no content response (204)
- // 5. content not modified response (304)
- if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
- resolve(new Response(body, responseOptions))
- return
- }
+/***/ }),
+/* 207 */,
+/* 208 */,
+/* 209 */,
+/* 210 */
+/***/ (function(__unusedmodule, exports) {
- // Be less strict when decoding compressed responses, since sometimes
- // servers send slightly invalid responses that are still accepted
- // by common browsers.
- // Always using Z_SYNC_FLUSH is what cURL does.
- const zlibOptions = {
- flush: zlib.Z_SYNC_FLUSH,
- finishFlush: zlib.Z_SYNC_FLUSH
- }
+// Generated by CoffeeScript 1.12.7
+(function() {
+ "use strict";
+ exports.stripBOM = function(str) {
+ if (str[0] === '\uFEFF') {
+ return str.substring(1);
+ } else {
+ return str;
+ }
+ };
- // for gzip
- if (codings === 'gzip' || codings === 'x-gzip') {
- body = body.pipe(zlib.createGunzip(zlibOptions))
- resolve(new Response(body, responseOptions))
- return
- }
+}).call(this);
- // for deflate
- if (codings === 'deflate' || codings === 'x-deflate') {
- // handle the infamous raw deflate response from old servers
- // a hack for old IIS and Apache servers
- const raw = res.pipe(new PassThrough())
- raw.once('data', chunk => {
- // see http://stackoverflow.com/questions/37519828
- if ((chunk[0] & 0x0F) === 0x08) {
- body = body.pipe(zlib.createInflate(zlibOptions))
+
+/***/ }),
+/* 211 */
+/***/ (function(module) {
+
+module.exports = require("https");
+
+/***/ }),
+/* 212 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const BB = __webpack_require__(900)
+
+const cacache = __webpack_require__(426)
+const Fetcher = __webpack_require__(177)
+const fs = __webpack_require__(747)
+const pipe = BB.promisify(__webpack_require__(371).pipe)
+const through = __webpack_require__(371).through
+
+const readFileAsync = BB.promisify(fs.readFile)
+const statAsync = BB.promisify(fs.stat)
+
+const MAX_BULK_SIZE = 2 * 1024 * 1024 // 2MB
+
+// `file` packages refer to local tarball files.
+const fetchFile = module.exports = Object.create(null)
+
+Fetcher.impl(fetchFile, {
+ packument (spec, opts) {
+ return BB.reject(new Error('Not implemented yet'))
+ },
+
+ manifest (spec, opts) {
+ // We can't do much here. `finalizeManifest` will take care of
+ // calling `tarball` to fill out all the necessary details.
+ return BB.resolve(null)
+ },
+
+ // All the heavy lifting for `file` packages is done here.
+ // They're never cached. We just read straight out of the file.
+ // TODO - maybe they *should* be cached?
+ tarball (spec, opts) {
+ const src = spec._resolved || spec.fetchSpec
+ const stream = through()
+ statAsync(src).then(stat => {
+ if (spec._resolved) { stream.emit('manifest', spec) }
+ if (stat.size <= MAX_BULK_SIZE) {
+ // YAY LET'S DO THING IN BULK
+ return readFileAsync(src).then(data => {
+ if (opts.cache) {
+ return cacache.put(
+ opts.cache, `pacote:tarball:file:${src}`, data, {
+ integrity: opts.integrity
+ }
+ ).then(integrity => ({ data, integrity }))
} else {
- body = body.pipe(zlib.createInflateRaw(zlibOptions))
+ return { data }
}
- resolve(new Response(body, responseOptions))
+ }).then(info => {
+ if (info.integrity) { stream.emit('integrity', info.integrity) }
+ stream.write(info.data, () => {
+ stream.end()
+ })
+ })
+ } else {
+ let integrity
+ const cacheWriter = !opts.cache
+ ? BB.resolve(null)
+ : (pipe(
+ fs.createReadStream(src),
+ cacache.put.stream(opts.cache, `pacote:tarball:${src}`, {
+ integrity: opts.integrity
+ }).on('integrity', d => { integrity = d })
+ ))
+ return cacheWriter.then(() => {
+ if (integrity) { stream.emit('integrity', integrity) }
+ return pipe(fs.createReadStream(src), stream)
})
- return
}
+ }).catch(err => stream.emit('error', err))
+ return stream
+ },
- // otherwise, use response as-is
- resolve(new Response(body, responseOptions))
- })
-
- writeToStream(req, request)
- })
-};
+ fromManifest (manifest, spec, opts) {
+ return this.tarball(manifest || spec, opts)
+ }
+})
-/**
- * Redirect code matching
- *
- * @param Number code Status code
- * @return Boolean
- */
-fetch.isRedirect = code => code === 301 || code === 302 || code === 303 || code === 307 || code === 308
-// expose Promise
-fetch.Promise = global.Promise
-exports.Headers = Headers
-exports.Request = Request
-exports.Response = Response
-exports.FetchError = FetchError
+/***/ }),
+/* 213 */
+/***/ (function(module) {
+module.exports = require("punycode");
/***/ }),
-/* 270 */
+/* 214 */,
+/* 215 */
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
+/* eslint-disable node/no-deprecated-api */
-module.exports = __webpack_require__(789)
+var buffer = __webpack_require__(293)
+var Buffer = buffer.Buffer
-/***/ }),
-/* 271 */
-/***/ (function(module) {
+var safer = {}
-"use strict";
+var key
+
+for (key in buffer) {
+ if (!buffer.hasOwnProperty(key)) continue
+ if (key === 'SlowBuffer' || key === 'Buffer') continue
+ safer[key] = buffer[key]
+}
+var Safer = safer.Buffer = {}
+for (key in Buffer) {
+ if (!Buffer.hasOwnProperty(key)) continue
+ if (key === 'allocUnsafe' || key === 'allocUnsafeSlow') continue
+ Safer[key] = Buffer[key]
+}
-module.exports = hashToSegments
+safer.Buffer.prototype = Buffer.prototype
-function hashToSegments (hash) {
- return [
- hash.slice(0, 2),
- hash.slice(2, 4),
- hash.slice(4)
- ]
+if (!Safer.from || Safer.from === Uint8Array.from) {
+ Safer.from = function (value, encodingOrOffset, length) {
+ if (typeof value === 'number') {
+ throw new TypeError('The "value" argument must not be of type number. Received type ' + typeof value)
+ }
+ if (value && typeof value.length === 'undefined') {
+ throw new TypeError('The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type ' + typeof value)
+ }
+ return Buffer(value, encodingOrOffset, length)
+ }
+}
+
+if (!Safer.alloc) {
+ Safer.alloc = function (size, fill, encoding) {
+ if (typeof size !== 'number') {
+ throw new TypeError('The "size" argument must be of type number. Received type ' + typeof size)
+ }
+ if (size < 0 || size >= 2 * (1 << 30)) {
+ throw new RangeError('The value "' + size + '" is invalid for option "size"')
+ }
+ var buf = Buffer(size)
+ if (!fill || fill.length === 0) {
+ buf.fill(0)
+ } else if (typeof encoding === 'string') {
+ buf.fill(fill, encoding)
+ } else {
+ buf.fill(fill)
+ }
+ return buf
+ }
+}
+
+if (!safer.kStringMaxLength) {
+ try {
+ safer.kStringMaxLength = process.binding('buffer').kStringMaxLength
+ } catch (e) {
+ // we can't determine kStringMaxLength in environments where process.binding
+ // is unsupported, so let's not set it
+ }
+}
+
+if (!safer.constants) {
+ safer.constants = {
+ MAX_LENGTH: safer.kMaxLength
+ }
+ if (safer.kStringMaxLength) {
+ safer.constants.MAX_STRING_LENGTH = safer.kStringMaxLength
+ }
}
+module.exports = safer
+
/***/ }),
-/* 272 */
+/* 216 */,
+/* 217 */,
+/* 218 */
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-module.exports = function(Promise, Context,
- enableAsyncHooks, disableAsyncHooks) {
-var async = Promise._async;
-var Warning = __webpack_require__(351).Warning;
-var util = __webpack_require__(248);
-var es5 = __webpack_require__(883);
-var canAttachTrace = util.canAttachTrace;
-var unhandledRejectionHandled;
-var possiblyUnhandledRejection;
-var bluebirdFramePattern =
- /[\\\/]bluebird[\\\/]js[\\\/](release|debug|instrumented)/;
-var nodeFramePattern = /\((?:timers\.js):\d+:\d+\)/;
-var parseLinePattern = /[\/<\(](.+?):(\d+):(\d+)\)?\s*$/;
-var stackFramePattern = null;
-var formatStack = null;
-var indentStackFrames = false;
-var printWarning;
-var debugging = !!(util.env("BLUEBIRD_DEBUG") != 0 &&
- ( false ||
- util.env("BLUEBIRD_DEBUG") ||
- util.env("NODE_ENV") === "development"));
-
-var warnings = !!(util.env("BLUEBIRD_WARNINGS") != 0 &&
- (debugging || util.env("BLUEBIRD_WARNINGS")));
-var longStackTraces = !!(util.env("BLUEBIRD_LONG_STACK_TRACES") != 0 &&
- (debugging || util.env("BLUEBIRD_LONG_STACK_TRACES")));
+module.exports = __webpack_require__(900).promisify(__webpack_require__(687))
-var wForgottenReturn = util.env("BLUEBIRD_W_FORGOTTEN_RETURN") != 0 &&
- (warnings || !!util.env("BLUEBIRD_W_FORGOTTEN_RETURN"));
-var deferUnhandledRejectionCheck;
-(function() {
- var promises = [];
+/***/ }),
+/* 219 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- function unhandledRejectionCheck() {
- for (var i = 0; i < promises.length; ++i) {
- promises[i]._notifyUnhandledRejection();
- }
- unhandledRejectionClear();
- }
+const Range = __webpack_require__(124)
- function unhandledRejectionClear() {
- promises.length = 0;
- }
+// Mostly just for testing and legacy API reasons
+const toComparators = (range, options) =>
+ new Range(range, options).set
+ .map(comp => comp.map(c => c.value).join(' ').trim().split(' '))
- deferUnhandledRejectionCheck = function(promise) {
- promises.push(promise);
- setTimeout(unhandledRejectionCheck, 1);
- };
+module.exports = toComparators
- es5.defineProperty(Promise, "_unhandledRejectionCheck", {
- value: unhandledRejectionCheck
- });
- es5.defineProperty(Promise, "_unhandledRejectionClear", {
- value: unhandledRejectionClear
- });
-})();
-Promise.prototype.suppressUnhandledRejections = function() {
- var target = this._target();
- target._bitField = ((target._bitField & (~1048576)) |
- 524288);
-};
+/***/ }),
+/* 220 */
+/***/ (function(__unusedmodule, exports) {
-Promise.prototype._ensurePossibleRejectionHandled = function () {
- if ((this._bitField & 524288) !== 0) return;
- this._setRejectionIsUnhandled();
- deferUnhandledRejectionCheck(this);
-};
+"use strict";
-Promise.prototype._notifyUnhandledRejectionIsHandled = function () {
- fireRejectionEvent("rejectionHandled",
- unhandledRejectionHandled, undefined, this);
-};
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=SpanOptions.js.map
-Promise.prototype._setReturnedNonUndefined = function() {
- this._bitField = this._bitField | 268435456;
-};
+/***/ }),
+/* 221 */,
+/* 222 */,
+/* 223 */,
+/* 224 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
-Promise.prototype._returnedNonUndefined = function() {
- return (this._bitField & 268435456) !== 0;
-};
+"use strict";
-Promise.prototype._notifyUnhandledRejection = function () {
- if (this._isRejectionUnhandled()) {
- var reason = this._settledValue();
- this._setUnhandledRejectionIsNotified();
- fireRejectionEvent("unhandledRejection",
- possiblyUnhandledRejection, reason, this);
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.NOOP_TRACER_PROVIDER = exports.NoopTracerProvider = void 0;
+var NoopTracer_1 = __webpack_require__(151);
+/**
+ * An implementation of the {@link TracerProvider} which returns an impotent
+ * Tracer for all calls to `getTracer`.
+ *
+ * All operations are no-op.
+ */
+var NoopTracerProvider = /** @class */ (function () {
+ function NoopTracerProvider() {
}
-};
+ NoopTracerProvider.prototype.getTracer = function (_name, _version) {
+ return NoopTracer_1.NOOP_TRACER;
+ };
+ return NoopTracerProvider;
+}());
+exports.NoopTracerProvider = NoopTracerProvider;
+exports.NOOP_TRACER_PROVIDER = new NoopTracerProvider();
+//# sourceMappingURL=NoopTracerProvider.js.map
-Promise.prototype._setUnhandledRejectionIsNotified = function () {
- this._bitField = this._bitField | 262144;
-};
+/***/ }),
+/* 225 */,
+/* 226 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-Promise.prototype._unsetUnhandledRejectionIsNotified = function () {
- this._bitField = this._bitField & (~262144);
-};
+"use strict";
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
-Promise.prototype._isUnhandledRejectionNotified = function () {
- return (this._bitField & 262144) > 0;
-};
-Promise.prototype._setRejectionIsUnhandled = function () {
- this._bitField = this._bitField | 1048576;
-};
-Promise.prototype._unsetRejectionIsUnhandled = function () {
- this._bitField = this._bitField & (~1048576);
- if (this._isUnhandledRejectionNotified()) {
- this._unsetUnhandledRejectionIsNotified();
- this._notifyUnhandledRejectionIsHandled();
- }
-};
+/**/
-Promise.prototype._isRejectionUnhandled = function () {
- return (this._bitField & 1048576) > 0;
-};
+var pna = __webpack_require__(78);
+/**/
-Promise.prototype._warn = function(message, shouldUseOwnTrace, promise) {
- return warn(message, shouldUseOwnTrace, promise || this);
-};
+module.exports = Readable;
-Promise.onPossiblyUnhandledRejection = function (fn) {
- var context = Promise._getContext();
- possiblyUnhandledRejection = util.contextBind(context, fn);
-};
+/**/
+var isArray = __webpack_require__(262);
+/**/
-Promise.onUnhandledRejectionHandled = function (fn) {
- var context = Promise._getContext();
- unhandledRejectionHandled = util.contextBind(context, fn);
-};
+/**/
+var Duplex;
+/**/
-var disableLongStackTraces = function() {};
-Promise.longStackTraces = function () {
- if (async.haveItemsQueued() && !config.longStackTraces) {
- throw new Error("cannot enable long stack traces after promises have been created\u000a\u000a See http://goo.gl/MqrFmX\u000a");
- }
- if (!config.longStackTraces && longStackTracesIsSupported()) {
- var Promise_captureStackTrace = Promise.prototype._captureStackTrace;
- var Promise_attachExtraTrace = Promise.prototype._attachExtraTrace;
- var Promise_dereferenceTrace = Promise.prototype._dereferenceTrace;
- config.longStackTraces = true;
- disableLongStackTraces = function() {
- if (async.haveItemsQueued() && !config.longStackTraces) {
- throw new Error("cannot enable long stack traces after promises have been created\u000a\u000a See http://goo.gl/MqrFmX\u000a");
- }
- Promise.prototype._captureStackTrace = Promise_captureStackTrace;
- Promise.prototype._attachExtraTrace = Promise_attachExtraTrace;
- Promise.prototype._dereferenceTrace = Promise_dereferenceTrace;
- Context.deactivateLongStackTraces();
- config.longStackTraces = false;
- };
- Promise.prototype._captureStackTrace = longStackTracesCaptureStackTrace;
- Promise.prototype._attachExtraTrace = longStackTracesAttachExtraTrace;
- Promise.prototype._dereferenceTrace = longStackTracesDereferenceTrace;
- Context.activateLongStackTraces();
- }
-};
+Readable.ReadableState = ReadableState;
-Promise.hasLongStackTraces = function () {
- return config.longStackTraces && longStackTracesIsSupported();
+/**/
+var EE = __webpack_require__(614).EventEmitter;
+
+var EElistenerCount = function (emitter, type) {
+ return emitter.listeners(type).length;
};
+/**/
+/**/
+var Stream = __webpack_require__(427);
+/**/
-var legacyHandlers = {
- unhandledrejection: {
- before: function() {
- var ret = util.global.onunhandledrejection;
- util.global.onunhandledrejection = null;
- return ret;
- },
- after: function(fn) {
- util.global.onunhandledrejection = fn;
- }
- },
- rejectionhandled: {
- before: function() {
- var ret = util.global.onrejectionhandled;
- util.global.onrejectionhandled = null;
- return ret;
- },
- after: function(fn) {
- util.global.onrejectionhandled = fn;
- }
- }
-};
+/**/
-var fireDomEvent = (function() {
- var dispatch = function(legacy, e) {
- if (legacy) {
- var fn;
- try {
- fn = legacy.before();
- return !util.global.dispatchEvent(e);
- } finally {
- legacy.after(fn);
- }
- } else {
- return !util.global.dispatchEvent(e);
- }
- };
- try {
- if (typeof CustomEvent === "function") {
- var event = new CustomEvent("CustomEvent");
- util.global.dispatchEvent(event);
- return function(name, event) {
- name = name.toLowerCase();
- var eventData = {
- detail: event,
- cancelable: true
- };
- var domEvent = new CustomEvent(name, eventData);
- es5.defineProperty(
- domEvent, "promise", {value: event.promise});
- es5.defineProperty(
- domEvent, "reason", {value: event.reason});
+var Buffer = __webpack_require__(254).Buffer;
+var OurUint8Array = global.Uint8Array || function () {};
+function _uint8ArrayToBuffer(chunk) {
+ return Buffer.from(chunk);
+}
+function _isUint8Array(obj) {
+ return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
+}
- return dispatch(legacyHandlers[name], domEvent);
- };
- } else if (typeof Event === "function") {
- var event = new Event("CustomEvent");
- util.global.dispatchEvent(event);
- return function(name, event) {
- name = name.toLowerCase();
- var domEvent = new Event(name, {
- cancelable: true
- });
- domEvent.detail = event;
- es5.defineProperty(domEvent, "promise", {value: event.promise});
- es5.defineProperty(domEvent, "reason", {value: event.reason});
- return dispatch(legacyHandlers[name], domEvent);
- };
- } else {
- var event = document.createEvent("CustomEvent");
- event.initCustomEvent("testingtheevent", false, true, {});
- util.global.dispatchEvent(event);
- return function(name, event) {
- name = name.toLowerCase();
- var domEvent = document.createEvent("CustomEvent");
- domEvent.initCustomEvent(name, false, true,
- event);
- return dispatch(legacyHandlers[name], domEvent);
- };
- }
- } catch (e) {}
- return function() {
- return false;
- };
-})();
+/**/
-var fireGlobalEvent = (function() {
- if (util.isNode) {
- return function() {
- return process.emit.apply(process, arguments);
- };
- } else {
- if (!util.global) {
- return function() {
- return false;
- };
- }
- return function(name) {
- var methodName = "on" + name.toLowerCase();
- var method = util.global[methodName];
- if (!method) return false;
- method.apply(util.global, [].slice.call(arguments, 1));
- return true;
- };
- }
-})();
+/**/
+var util = Object.create(__webpack_require__(286));
+util.inherits = __webpack_require__(689);
+/**/
-function generatePromiseLifecycleEventObject(name, promise) {
- return {promise: promise};
+/**/
+var debugUtil = __webpack_require__(669);
+var debug = void 0;
+if (debugUtil && debugUtil.debuglog) {
+ debug = debugUtil.debuglog('stream');
+} else {
+ debug = function () {};
}
+/**/
-var eventToObjectGenerator = {
- promiseCreated: generatePromiseLifecycleEventObject,
- promiseFulfilled: generatePromiseLifecycleEventObject,
- promiseRejected: generatePromiseLifecycleEventObject,
- promiseResolved: generatePromiseLifecycleEventObject,
- promiseCancelled: generatePromiseLifecycleEventObject,
- promiseChained: function(name, promise, child) {
- return {promise: promise, child: child};
- },
- warning: function(name, warning) {
- return {warning: warning};
- },
- unhandledRejection: function (name, reason, promise) {
- return {reason: reason, promise: promise};
- },
- rejectionHandled: generatePromiseLifecycleEventObject
-};
+var BufferList = __webpack_require__(76);
+var destroyImpl = __webpack_require__(232);
+var StringDecoder;
-var activeFireEvent = function (name) {
- var globalEventFired = false;
- try {
- globalEventFired = fireGlobalEvent.apply(null, arguments);
- } catch (e) {
- async.throwLater(e);
- globalEventFired = true;
- }
+util.inherits(Readable, Stream);
- var domEventFired = false;
- try {
- domEventFired = fireDomEvent(name,
- eventToObjectGenerator[name].apply(null, arguments));
- } catch (e) {
- async.throwLater(e);
- domEventFired = true;
- }
+var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
- return domEventFired || globalEventFired;
-};
+function prependListener(emitter, event, fn) {
+ // Sadly this is not cacheable as some libraries bundle their own
+ // event emitter implementation with them.
+ if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);
-Promise.config = function(opts) {
- opts = Object(opts);
- if ("longStackTraces" in opts) {
- if (opts.longStackTraces) {
- Promise.longStackTraces();
- } else if (!opts.longStackTraces && Promise.hasLongStackTraces()) {
- disableLongStackTraces();
- }
- }
- if ("warnings" in opts) {
- var warningsOption = opts.warnings;
- config.warnings = !!warningsOption;
- wForgottenReturn = config.warnings;
+ // This is a hack to make sure that our error handler is attached before any
+ // userland ones. NEVER DO THIS. This is here only because this code needs
+ // to continue to work with older versions of Node.js that do not include
+ // the prependListener() method. The goal is to eventually remove this hack.
+ if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
+}
- if (util.isObject(warningsOption)) {
- if ("wForgottenReturn" in warningsOption) {
- wForgottenReturn = !!warningsOption.wForgottenReturn;
- }
- }
- }
- if ("cancellation" in opts && opts.cancellation && !config.cancellation) {
- if (async.haveItemsQueued()) {
- throw new Error(
- "cannot enable cancellation after promises are in use");
- }
- Promise.prototype._clearCancellationData =
- cancellationClearCancellationData;
- Promise.prototype._propagateFrom = cancellationPropagateFrom;
- Promise.prototype._onCancel = cancellationOnCancel;
- Promise.prototype._setOnCancel = cancellationSetOnCancel;
- Promise.prototype._attachCancellationCallback =
- cancellationAttachCancellationCallback;
- Promise.prototype._execute = cancellationExecute;
- propagateFromFunction = cancellationPropagateFrom;
- config.cancellation = true;
- }
- if ("monitoring" in opts) {
- if (opts.monitoring && !config.monitoring) {
- config.monitoring = true;
- Promise.prototype._fireEvent = activeFireEvent;
- } else if (!opts.monitoring && config.monitoring) {
- config.monitoring = false;
- Promise.prototype._fireEvent = defaultFireEvent;
- }
- }
- if ("asyncHooks" in opts && util.nodeSupportsAsyncResource) {
- var prev = config.asyncHooks;
- var cur = !!opts.asyncHooks;
- if (prev !== cur) {
- config.asyncHooks = cur;
- if (cur) {
- enableAsyncHooks();
- } else {
- disableAsyncHooks();
- }
- }
- }
- return Promise;
-};
+function ReadableState(options, stream) {
+ Duplex = Duplex || __webpack_require__(907);
-function defaultFireEvent() { return false; }
+ options = options || {};
-Promise.prototype._fireEvent = defaultFireEvent;
-Promise.prototype._execute = function(executor, resolve, reject) {
- try {
- executor(resolve, reject);
- } catch (e) {
- return e;
- }
-};
-Promise.prototype._onCancel = function () {};
-Promise.prototype._setOnCancel = function (handler) { ; };
-Promise.prototype._attachCancellationCallback = function(onCancel) {
- ;
-};
-Promise.prototype._captureStackTrace = function () {};
-Promise.prototype._attachExtraTrace = function () {};
-Promise.prototype._dereferenceTrace = function () {};
-Promise.prototype._clearCancellationData = function() {};
-Promise.prototype._propagateFrom = function (parent, flags) {
- ;
- ;
-};
+ // Duplex streams are both readable and writable, but share
+ // the same options object.
+ // However, some cases require setting options to different
+ // values for the readable and the writable sides of the duplex stream.
+ // These options can be provided separately as readableXXX and writableXXX.
+ var isDuplex = stream instanceof Duplex;
-function cancellationExecute(executor, resolve, reject) {
- var promise = this;
- try {
- executor(resolve, reject, function(onCancel) {
- if (typeof onCancel !== "function") {
- throw new TypeError("onCancel must be a function, got: " +
- util.toString(onCancel));
- }
- promise._attachCancellationCallback(onCancel);
- });
- } catch (e) {
- return e;
- }
-}
+ // object stream flag. Used to make read(n) ignore n and to
+ // make all the buffer merging and length checks go away
+ this.objectMode = !!options.objectMode;
-function cancellationAttachCancellationCallback(onCancel) {
- if (!this._isCancellable()) return this;
+ if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
- var previousOnCancel = this._onCancel();
- if (previousOnCancel !== undefined) {
- if (util.isArray(previousOnCancel)) {
- previousOnCancel.push(onCancel);
- } else {
- this._setOnCancel([previousOnCancel, onCancel]);
- }
- } else {
- this._setOnCancel(onCancel);
- }
-}
+ // the point at which it stops calling _read() to fill the buffer
+ // Note: 0 is a valid value, means "don't call _read preemptively ever"
+ var hwm = options.highWaterMark;
+ var readableHwm = options.readableHighWaterMark;
+ var defaultHwm = this.objectMode ? 16 : 16 * 1024;
-function cancellationOnCancel() {
- return this._onCancelField;
-}
+ if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm;
-function cancellationSetOnCancel(onCancel) {
- this._onCancelField = onCancel;
-}
+ // cast to ints.
+ this.highWaterMark = Math.floor(this.highWaterMark);
-function cancellationClearCancellationData() {
- this._cancellationParent = undefined;
- this._onCancelField = undefined;
-}
+ // A linked list is used to store data chunks instead of an array because the
+ // linked list can remove elements from the beginning faster than
+ // array.shift()
+ this.buffer = new BufferList();
+ this.length = 0;
+ this.pipes = null;
+ this.pipesCount = 0;
+ this.flowing = null;
+ this.ended = false;
+ this.endEmitted = false;
+ this.reading = false;
-function cancellationPropagateFrom(parent, flags) {
- if ((flags & 1) !== 0) {
- this._cancellationParent = parent;
- var branchesRemainingToCancel = parent._branchesRemainingToCancel;
- if (branchesRemainingToCancel === undefined) {
- branchesRemainingToCancel = 0;
- }
- parent._branchesRemainingToCancel = branchesRemainingToCancel + 1;
- }
- if ((flags & 2) !== 0 && parent._isBound()) {
- this._setBoundTo(parent._boundTo);
- }
-}
+ // a flag to be able to tell if the event 'readable'/'data' is emitted
+ // immediately, or on a later tick. We set this to true at first, because
+ // any actions that shouldn't happen until "later" should generally also
+ // not happen before the first read call.
+ this.sync = true;
-function bindingPropagateFrom(parent, flags) {
- if ((flags & 2) !== 0 && parent._isBound()) {
- this._setBoundTo(parent._boundTo);
- }
-}
-var propagateFromFunction = bindingPropagateFrom;
+ // whenever we return null, then we set a flag to say
+ // that we're awaiting a 'readable' event emission.
+ this.needReadable = false;
+ this.emittedReadable = false;
+ this.readableListening = false;
+ this.resumeScheduled = false;
-function boundValueFunction() {
- var ret = this._boundTo;
- if (ret !== undefined) {
- if (ret instanceof Promise) {
- if (ret.isFulfilled()) {
- return ret.value();
- } else {
- return undefined;
- }
- }
- }
- return ret;
-}
+ // has it been destroyed
+ this.destroyed = false;
-function longStackTracesCaptureStackTrace() {
- this._trace = new CapturedTrace(this._peekContext());
-}
+ // Crypto is kind of old and crusty. Historically, its default string
+ // encoding is 'binary' so we have to make this configurable.
+ // Everything else in the universe uses 'utf8', though.
+ this.defaultEncoding = options.defaultEncoding || 'utf8';
-function longStackTracesAttachExtraTrace(error, ignoreSelf) {
- if (canAttachTrace(error)) {
- var trace = this._trace;
- if (trace !== undefined) {
- if (ignoreSelf) trace = trace._parent;
- }
- if (trace !== undefined) {
- trace.attachExtraTrace(error);
- } else if (!error.__stackCleaned__) {
- var parsed = parseStackAndMessage(error);
- util.notEnumerableProp(error, "stack",
- parsed.message + "\n" + parsed.stack.join("\n"));
- util.notEnumerableProp(error, "__stackCleaned__", true);
- }
- }
-}
+ // the number of writers that are awaiting a drain event in .pipe()s
+ this.awaitDrain = 0;
-function longStackTracesDereferenceTrace() {
- this._trace = undefined;
-}
+ // if true, a maybeReadMore has been scheduled
+ this.readingMore = false;
-function checkForgottenReturns(returnValue, promiseCreated, name, promise,
- parent) {
- if (returnValue === undefined && promiseCreated !== null &&
- wForgottenReturn) {
- if (parent !== undefined && parent._returnedNonUndefined()) return;
- if ((promise._bitField & 65535) === 0) return;
+ this.decoder = null;
+ this.encoding = null;
+ if (options.encoding) {
+ if (!StringDecoder) StringDecoder = __webpack_require__(432).StringDecoder;
+ this.decoder = new StringDecoder(options.encoding);
+ this.encoding = options.encoding;
+ }
+}
- if (name) name = name + " ";
- var handlerLine = "";
- var creatorLine = "";
- if (promiseCreated._trace) {
- var traceLines = promiseCreated._trace.stack.split("\n");
- var stack = cleanStack(traceLines);
- for (var i = stack.length - 1; i >= 0; --i) {
- var line = stack[i];
- if (!nodeFramePattern.test(line)) {
- var lineMatches = line.match(parseLinePattern);
- if (lineMatches) {
- handlerLine = "at " + lineMatches[1] +
- ":" + lineMatches[2] + ":" + lineMatches[3] + " ";
- }
- break;
- }
- }
+function Readable(options) {
+ Duplex = Duplex || __webpack_require__(907);
- if (stack.length > 0) {
- var firstUserLine = stack[0];
- for (var i = 0; i < traceLines.length; ++i) {
+ if (!(this instanceof Readable)) return new Readable(options);
- if (traceLines[i] === firstUserLine) {
- if (i > 0) {
- creatorLine = "\n" + traceLines[i - 1];
- }
- break;
- }
- }
+ this._readableState = new ReadableState(options, this);
- }
- }
- var msg = "a promise was created in a " + name +
- "handler " + handlerLine + "but was not returned from it, " +
- "see http://goo.gl/rRqMUw" +
- creatorLine;
- promise._warn(msg, true, promiseCreated);
- }
-}
+ // legacy
+ this.readable = true;
-function deprecated(name, replacement) {
- var message = name +
- " is deprecated and will be removed in a future version.";
- if (replacement) message += " Use " + replacement + " instead.";
- return warn(message);
-}
+ if (options) {
+ if (typeof options.read === 'function') this._read = options.read;
-function warn(message, shouldUseOwnTrace, promise) {
- if (!config.warnings) return;
- var warning = new Warning(message);
- var ctx;
- if (shouldUseOwnTrace) {
- promise._attachExtraTrace(warning);
- } else if (config.longStackTraces && (ctx = Promise._peekContext())) {
- ctx.attachExtraTrace(warning);
- } else {
- var parsed = parseStackAndMessage(warning);
- warning.stack = parsed.message + "\n" + parsed.stack.join("\n");
- }
+ if (typeof options.destroy === 'function') this._destroy = options.destroy;
+ }
- if (!activeFireEvent("warning", warning)) {
- formatAndLogError(warning, "", true);
- }
+ Stream.call(this);
}
-function reconstructStack(message, stacks) {
- for (var i = 0; i < stacks.length - 1; ++i) {
- stacks[i].push("From previous event:");
- stacks[i] = stacks[i].join("\n");
+Object.defineProperty(Readable.prototype, 'destroyed', {
+ get: function () {
+ if (this._readableState === undefined) {
+ return false;
}
- if (i < stacks.length) {
- stacks[i] = stacks[i].join("\n");
+ return this._readableState.destroyed;
+ },
+ set: function (value) {
+ // we ignore the value if the stream
+ // has not been initialized yet
+ if (!this._readableState) {
+ return;
}
- return message + "\n" + stacks.join("\n");
-}
-function removeDuplicateOrEmptyJumps(stacks) {
- for (var i = 0; i < stacks.length; ++i) {
- if (stacks[i].length === 0 ||
- ((i + 1 < stacks.length) && stacks[i][0] === stacks[i+1][0])) {
- stacks.splice(i, 1);
- i--;
- }
- }
-}
+ // backward compatibility, the user is explicitly
+ // managing destroyed
+ this._readableState.destroyed = value;
+ }
+});
-function removeCommonRoots(stacks) {
- var current = stacks[0];
- for (var i = 1; i < stacks.length; ++i) {
- var prev = stacks[i];
- var currentLastIndex = current.length - 1;
- var currentLastLine = current[currentLastIndex];
- var commonRootMeetPoint = -1;
+Readable.prototype.destroy = destroyImpl.destroy;
+Readable.prototype._undestroy = destroyImpl.undestroy;
+Readable.prototype._destroy = function (err, cb) {
+ this.push(null);
+ cb(err);
+};
- for (var j = prev.length - 1; j >= 0; --j) {
- if (prev[j] === currentLastLine) {
- commonRootMeetPoint = j;
- break;
- }
- }
+// Manually shove something into the read() buffer.
+// This returns true if the highWaterMark has not been hit yet,
+// similar to how Writable.write() returns true if you should
+// write() some more.
+Readable.prototype.push = function (chunk, encoding) {
+ var state = this._readableState;
+ var skipChunkCheck;
- for (var j = commonRootMeetPoint; j >= 0; --j) {
- var line = prev[j];
- if (current[currentLastIndex] === line) {
- current.pop();
- currentLastIndex--;
- } else {
- break;
- }
- }
- current = prev;
+ if (!state.objectMode) {
+ if (typeof chunk === 'string') {
+ encoding = encoding || state.defaultEncoding;
+ if (encoding !== state.encoding) {
+ chunk = Buffer.from(chunk, encoding);
+ encoding = '';
+ }
+ skipChunkCheck = true;
}
-}
+ } else {
+ skipChunkCheck = true;
+ }
-function cleanStack(stack) {
- var ret = [];
- for (var i = 0; i < stack.length; ++i) {
- var line = stack[i];
- var isTraceLine = " (No stack trace)" === line ||
- stackFramePattern.test(line);
- var isInternalFrame = isTraceLine && shouldIgnore(line);
- if (isTraceLine && !isInternalFrame) {
- if (indentStackFrames && line.charAt(0) !== " ") {
- line = " " + line;
- }
- ret.push(line);
- }
- }
- return ret;
-}
+ return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
+};
-function stackFramesAsArray(error) {
- var stack = error.stack.replace(/\s+$/g, "").split("\n");
- for (var i = 0; i < stack.length; ++i) {
- var line = stack[i];
- if (" (No stack trace)" === line || stackFramePattern.test(line)) {
- break;
- }
- }
- if (i > 0 && error.name != "SyntaxError") {
- stack = stack.slice(i);
- }
- return stack;
-}
+// Unshift should *always* be something directly out of read()
+Readable.prototype.unshift = function (chunk) {
+ return readableAddChunk(this, chunk, null, true, false);
+};
-function parseStackAndMessage(error) {
- var stack = error.stack;
- var message = error.toString();
- stack = typeof stack === "string" && stack.length > 0
- ? stackFramesAsArray(error) : [" (No stack trace)"];
- return {
- message: message,
- stack: error.name == "SyntaxError" ? stack : cleanStack(stack)
- };
-}
+function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
+ var state = stream._readableState;
+ if (chunk === null) {
+ state.reading = false;
+ onEofChunk(stream, state);
+ } else {
+ var er;
+ if (!skipChunkCheck) er = chunkInvalid(state, chunk);
+ if (er) {
+ stream.emit('error', er);
+ } else if (state.objectMode || chunk && chunk.length > 0) {
+ if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
+ chunk = _uint8ArrayToBuffer(chunk);
+ }
-function formatAndLogError(error, title, isSoft) {
- if (typeof console !== "undefined") {
- var message;
- if (util.isObject(error)) {
- var stack = error.stack;
- message = title + formatStack(stack, error);
+ if (addToFront) {
+ if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true);
+ } else if (state.ended) {
+ stream.emit('error', new Error('stream.push() after EOF'));
+ } else {
+ state.reading = false;
+ if (state.decoder && !encoding) {
+ chunk = state.decoder.write(chunk);
+ if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
} else {
- message = title + String(error);
- }
- if (typeof printWarning === "function") {
- printWarning(message, isSoft);
- } else if (typeof console.log === "function" ||
- typeof console.log === "object") {
- console.log(message);
- }
- }
-}
-
-function fireRejectionEvent(name, localHandler, reason, promise) {
- var localEventFired = false;
- try {
- if (typeof localHandler === "function") {
- localEventFired = true;
- if (name === "rejectionHandled") {
- localHandler(promise);
- } else {
- localHandler(reason, promise);
- }
+ addChunk(stream, state, chunk, false);
}
- } catch (e) {
- async.throwLater(e);
+ }
+ } else if (!addToFront) {
+ state.reading = false;
}
+ }
- if (name === "unhandledRejection") {
- if (!activeFireEvent(name, reason, promise) && !localEventFired) {
- formatAndLogError(reason, "Unhandled rejection ");
- }
- } else {
- activeFireEvent(name, promise);
- }
+ return needMoreData(state);
}
-function formatNonError(obj) {
- var str;
- if (typeof obj === "function") {
- str = "[function " +
- (obj.name || "anonymous") +
- "]";
- } else {
- str = obj && typeof obj.toString === "function"
- ? obj.toString() : util.toString(obj);
- var ruselessToString = /\[object [a-zA-Z0-9$_]+\]/;
- if (ruselessToString.test(str)) {
- try {
- var newStr = JSON.stringify(obj);
- str = newStr;
- }
- catch(e) {
+function addChunk(stream, state, chunk, addToFront) {
+ if (state.flowing && state.length === 0 && !state.sync) {
+ stream.emit('data', chunk);
+ stream.read(0);
+ } else {
+ // update the buffer info.
+ state.length += state.objectMode ? 1 : chunk.length;
+ if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
- }
- }
- if (str.length === 0) {
- str = "(empty array)";
- }
- }
- return ("(<" + snip(str) + ">, no stack trace)");
+ if (state.needReadable) emitReadable(stream);
+ }
+ maybeReadMore(stream, state);
}
-function snip(str) {
- var maxChars = 41;
- if (str.length < maxChars) {
- return str;
- }
- return str.substr(0, maxChars - 3) + "...";
+function chunkInvalid(state, chunk) {
+ var er;
+ if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
+ er = new TypeError('Invalid non-string/buffer chunk');
+ }
+ return er;
}
-function longStackTracesIsSupported() {
- return typeof captureStackTrace === "function";
+// if it's past the high water mark, we can push in some more.
+// Also, if we have no data yet, we can stand some
+// more bytes. This is to work around cases where hwm=0,
+// such as the repl. Also, if the push() triggered a
+// readable event, and the user called read(largeNumber) such that
+// needReadable was set, then we ought to push more, so that another
+// 'readable' event will be triggered.
+function needMoreData(state) {
+ return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
}
-var shouldIgnore = function() { return false; };
-var parseLineInfoRegex = /[\/<\(]([^:\/]+):(\d+):(?:\d+)\)?\s*$/;
-function parseLineInfo(line) {
- var matches = line.match(parseLineInfoRegex);
- if (matches) {
- return {
- fileName: matches[1],
- line: parseInt(matches[2], 10)
- };
- }
-}
+Readable.prototype.isPaused = function () {
+ return this._readableState.flowing === false;
+};
-function setBounds(firstLineError, lastLineError) {
- if (!longStackTracesIsSupported()) return;
- var firstStackLines = (firstLineError.stack || "").split("\n");
- var lastStackLines = (lastLineError.stack || "").split("\n");
- var firstIndex = -1;
- var lastIndex = -1;
- var firstFileName;
- var lastFileName;
- for (var i = 0; i < firstStackLines.length; ++i) {
- var result = parseLineInfo(firstStackLines[i]);
- if (result) {
- firstFileName = result.fileName;
- firstIndex = result.line;
- break;
- }
- }
- for (var i = 0; i < lastStackLines.length; ++i) {
- var result = parseLineInfo(lastStackLines[i]);
- if (result) {
- lastFileName = result.fileName;
- lastIndex = result.line;
- break;
- }
- }
- if (firstIndex < 0 || lastIndex < 0 || !firstFileName || !lastFileName ||
- firstFileName !== lastFileName || firstIndex >= lastIndex) {
- return;
- }
+// backwards compatibility.
+Readable.prototype.setEncoding = function (enc) {
+ if (!StringDecoder) StringDecoder = __webpack_require__(432).StringDecoder;
+ this._readableState.decoder = new StringDecoder(enc);
+ this._readableState.encoding = enc;
+ return this;
+};
- shouldIgnore = function(line) {
- if (bluebirdFramePattern.test(line)) return true;
- var info = parseLineInfo(line);
- if (info) {
- if (info.fileName === firstFileName &&
- (firstIndex <= info.line && info.line <= lastIndex)) {
- return true;
- }
- }
- return false;
- };
+// Don't raise the hwm > 8MB
+var MAX_HWM = 0x800000;
+function computeNewHighWaterMark(n) {
+ if (n >= MAX_HWM) {
+ n = MAX_HWM;
+ } else {
+ // Get the next highest power of 2 to prevent increasing hwm excessively in
+ // tiny amounts
+ n--;
+ n |= n >>> 1;
+ n |= n >>> 2;
+ n |= n >>> 4;
+ n |= n >>> 8;
+ n |= n >>> 16;
+ n++;
+ }
+ return n;
}
-function CapturedTrace(parent) {
- this._parent = parent;
- this._promisesCreated = 0;
- var length = this._length = 1 + (parent === undefined ? 0 : parent._length);
- captureStackTrace(this, CapturedTrace);
- if (length > 32) this.uncycle();
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function howMuchToRead(n, state) {
+ if (n <= 0 || state.length === 0 && state.ended) return 0;
+ if (state.objectMode) return 1;
+ if (n !== n) {
+ // Only flow one buffer at a time
+ if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
+ }
+ // If we're asking for more than the current hwm, then raise the hwm.
+ if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
+ if (n <= state.length) return n;
+ // Don't have enough
+ if (!state.ended) {
+ state.needReadable = true;
+ return 0;
+ }
+ return state.length;
}
-util.inherits(CapturedTrace, Error);
-Context.CapturedTrace = CapturedTrace;
-CapturedTrace.prototype.uncycle = function() {
- var length = this._length;
- if (length < 2) return;
- var nodes = [];
- var stackToIndex = {};
+// you can override either this method, or the async _read(n) below.
+Readable.prototype.read = function (n) {
+ debug('read', n);
+ n = parseInt(n, 10);
+ var state = this._readableState;
+ var nOrig = n;
- for (var i = 0, node = this; node !== undefined; ++i) {
- nodes.push(node);
- node = node._parent;
- }
- length = this._length = i;
- for (var i = length - 1; i >= 0; --i) {
- var stack = nodes[i].stack;
- if (stackToIndex[stack] === undefined) {
- stackToIndex[stack] = i;
- }
- }
- for (var i = 0; i < length; ++i) {
- var currentStack = nodes[i].stack;
- var index = stackToIndex[currentStack];
- if (index !== undefined && index !== i) {
- if (index > 0) {
- nodes[index - 1]._parent = undefined;
- nodes[index - 1]._length = 1;
- }
- nodes[i]._parent = undefined;
- nodes[i]._length = 1;
- var cycleEdgeNode = i > 0 ? nodes[i - 1] : this;
+ if (n !== 0) state.emittedReadable = false;
- if (index < length - 1) {
- cycleEdgeNode._parent = nodes[index + 1];
- cycleEdgeNode._parent.uncycle();
- cycleEdgeNode._length =
- cycleEdgeNode._parent._length + 1;
- } else {
- cycleEdgeNode._parent = undefined;
- cycleEdgeNode._length = 1;
- }
- var currentChildLength = cycleEdgeNode._length + 1;
- for (var j = i - 2; j >= 0; --j) {
- nodes[j]._length = currentChildLength;
- currentChildLength++;
- }
- return;
- }
- }
-};
+ // if we're doing read(0) to trigger a readable event, but we
+ // already have a bunch of data in the buffer, then just trigger
+ // the 'readable' event and move on.
+ if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
+ debug('read: emitReadable', state.length, state.ended);
+ if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
+ return null;
+ }
-CapturedTrace.prototype.attachExtraTrace = function(error) {
- if (error.__stackCleaned__) return;
- this.uncycle();
- var parsed = parseStackAndMessage(error);
- var message = parsed.message;
- var stacks = [parsed.stack];
+ n = howMuchToRead(n, state);
- var trace = this;
- while (trace !== undefined) {
- stacks.push(cleanStack(trace.stack.split("\n")));
- trace = trace._parent;
- }
- removeCommonRoots(stacks);
- removeDuplicateOrEmptyJumps(stacks);
- util.notEnumerableProp(error, "stack", reconstructStack(message, stacks));
- util.notEnumerableProp(error, "__stackCleaned__", true);
-};
+ // if we've ended, and we're now clear, then finish it up.
+ if (n === 0 && state.ended) {
+ if (state.length === 0) endReadable(this);
+ return null;
+ }
-var captureStackTrace = (function stackDetection() {
- var v8stackFramePattern = /^\s*at\s*/;
- var v8stackFormatter = function(stack, error) {
- if (typeof stack === "string") return stack;
+ // All the actual chunk generation logic needs to be
+ // *below* the call to _read. The reason is that in certain
+ // synthetic stream cases, such as passthrough streams, _read
+ // may be a completely synchronous operation which may change
+ // the state of the read buffer, providing enough data when
+ // before there was *not* enough.
+ //
+ // So, the steps are:
+ // 1. Figure out what the state of things will be after we do
+ // a read from the buffer.
+ //
+ // 2. If that resulting state will trigger a _read, then call _read.
+ // Note that this may be asynchronous, or synchronous. Yes, it is
+ // deeply ugly to write APIs this way, but that still doesn't mean
+ // that the Readable class should behave improperly, as streams are
+ // designed to be sync/async agnostic.
+ // Take note if the _read call is sync or async (ie, if the read call
+ // has returned yet), so that we know whether or not it's safe to emit
+ // 'readable' etc.
+ //
+ // 3. Actually pull the requested chunks out of the buffer and return.
- if (error.name !== undefined &&
- error.message !== undefined) {
- return error.toString();
- }
- return formatNonError(error);
- };
+ // if we need a readable event, then we need to do some reading.
+ var doRead = state.needReadable;
+ debug('need readable', doRead);
- if (typeof Error.stackTraceLimit === "number" &&
- typeof Error.captureStackTrace === "function") {
- Error.stackTraceLimit += 6;
- stackFramePattern = v8stackFramePattern;
- formatStack = v8stackFormatter;
- var captureStackTrace = Error.captureStackTrace;
+ // if we currently have less than the highWaterMark, then also read some
+ if (state.length === 0 || state.length - n < state.highWaterMark) {
+ doRead = true;
+ debug('length less than watermark', doRead);
+ }
- shouldIgnore = function(line) {
- return bluebirdFramePattern.test(line);
- };
- return function(receiver, ignoreUntil) {
- Error.stackTraceLimit += 6;
- captureStackTrace(receiver, ignoreUntil);
- Error.stackTraceLimit -= 6;
- };
- }
- var err = new Error();
+ // however, if we've ended, then there's no point, and if we're already
+ // reading, then it's unnecessary.
+ if (state.ended || state.reading) {
+ doRead = false;
+ debug('reading or ended', doRead);
+ } else if (doRead) {
+ debug('do read');
+ state.reading = true;
+ state.sync = true;
+ // if the length is currently zero, then we *need* a readable event.
+ if (state.length === 0) state.needReadable = true;
+ // call internal read method
+ this._read(state.highWaterMark);
+ state.sync = false;
+ // If _read pushed data synchronously, then `reading` will be false,
+ // and we need to re-evaluate how much data we can return to the user.
+ if (!state.reading) n = howMuchToRead(nOrig, state);
+ }
- if (typeof err.stack === "string" &&
- err.stack.split("\n")[0].indexOf("stackDetection@") >= 0) {
- stackFramePattern = /@/;
- formatStack = v8stackFormatter;
- indentStackFrames = true;
- return function captureStackTrace(o) {
- o.stack = new Error().stack;
- };
- }
+ var ret;
+ if (n > 0) ret = fromList(n, state);else ret = null;
- var hasStackAfterThrow;
- try { throw new Error(); }
- catch(e) {
- hasStackAfterThrow = ("stack" in e);
- }
- if (!("stack" in err) && hasStackAfterThrow &&
- typeof Error.stackTraceLimit === "number") {
- stackFramePattern = v8stackFramePattern;
- formatStack = v8stackFormatter;
- return function captureStackTrace(o) {
- Error.stackTraceLimit += 6;
- try { throw new Error(); }
- catch(e) { o.stack = e.stack; }
- Error.stackTraceLimit -= 6;
- };
- }
+ if (ret === null) {
+ state.needReadable = true;
+ n = 0;
+ } else {
+ state.length -= n;
+ }
- formatStack = function(stack, error) {
- if (typeof stack === "string") return stack;
+ if (state.length === 0) {
+ // If we have nothing in the buffer, then we want to know
+ // as soon as we *do* get something into the buffer.
+ if (!state.ended) state.needReadable = true;
- if ((typeof error === "object" ||
- typeof error === "function") &&
- error.name !== undefined &&
- error.message !== undefined) {
- return error.toString();
- }
- return formatNonError(error);
- };
+ // If we tried to read() past the EOF, then emit end on the next tick.
+ if (nOrig !== n && state.ended) endReadable(this);
+ }
- return null;
+ if (ret !== null) this.emit('data', ret);
-})([]);
+ return ret;
+};
-if (typeof console !== "undefined" && typeof console.warn !== "undefined") {
- printWarning = function (message) {
- console.warn(message);
- };
- if (util.isNode && process.stderr.isTTY) {
- printWarning = function(message, isSoft) {
- var color = isSoft ? "\u001b[33m" : "\u001b[31m";
- console.warn(color + message + "\u001b[0m\n");
- };
- } else if (!util.isNode && typeof (new Error().stack) === "string") {
- printWarning = function(message, isSoft) {
- console.warn("%c" + message,
- isSoft ? "color: darkorange" : "color: red");
- };
+function onEofChunk(stream, state) {
+ if (state.ended) return;
+ if (state.decoder) {
+ var chunk = state.decoder.end();
+ if (chunk && chunk.length) {
+ state.buffer.push(chunk);
+ state.length += state.objectMode ? 1 : chunk.length;
}
-}
-
-var config = {
- warnings: warnings,
- longStackTraces: false,
- cancellation: false,
- monitoring: false,
- asyncHooks: false
-};
-
-if (longStackTraces) Promise.longStackTraces();
-
-return {
- asyncHooks: function() {
- return config.asyncHooks;
- },
- longStackTraces: function() {
- return config.longStackTraces;
- },
- warnings: function() {
- return config.warnings;
- },
- cancellation: function() {
- return config.cancellation;
- },
- monitoring: function() {
- return config.monitoring;
- },
- propagateFromFunction: function() {
- return propagateFromFunction;
- },
- boundValueFunction: function() {
- return boundValueFunction;
- },
- checkForgottenReturns: checkForgottenReturns,
- setBounds: setBounds,
- warn: warn,
- deprecated: deprecated,
- CapturedTrace: CapturedTrace,
- fireDomEvent: fireDomEvent,
- fireGlobalEvent: fireGlobalEvent
-};
-};
-
-
-/***/ }),
-/* 273 */
-/***/ (function(module, exports, __webpack_require__) {
-
-"use strict";
-
+ }
+ state.ended = true;
-const path = __webpack_require__(622)
-const fs = __webpack_require__(598)
-const chain = __webpack_require__(433).chain
-const mkdir = __webpack_require__(836)
-const rm = __webpack_require__(974)
-const inferOwner = __webpack_require__(686)
-const chown = __webpack_require__(358)
+ // emit 'readable' now to make sure it gets picked up.
+ emitReadable(stream);
+}
-exports = module.exports = {
- link: link,
- linkIfExists: linkIfExists
+// Don't emit readable right away in sync mode, because this can trigger
+// another read() call => stack overflow. This way, it might trigger
+// a nextTick recursion warning, but that's not so bad.
+function emitReadable(stream) {
+ var state = stream._readableState;
+ state.needReadable = false;
+ if (!state.emittedReadable) {
+ debug('emitReadable', state.flowing);
+ state.emittedReadable = true;
+ if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream);
+ }
}
-function linkIfExists (from, to, opts, cb) {
- opts.currentIsLink = false
- opts.currentExists = false
- fs.stat(from, function (er) {
- if (er) return cb()
- fs.readlink(to, function (er, fromOnDisk) {
- if (!er || er.code !== 'ENOENT') {
- opts.currentExists = true
- }
- // if the link already exists and matches what we would do,
- // we don't need to do anything
- if (!er) {
- opts.currentIsLink = true
- var toDir = path.dirname(to)
- var absoluteFrom = path.resolve(toDir, from)
- var absoluteFromOnDisk = path.resolve(toDir, fromOnDisk)
- opts.currentTarget = absoluteFromOnDisk
- if (absoluteFrom === absoluteFromOnDisk) return cb()
- }
- link(from, to, opts, cb)
- })
- })
+function emitReadable_(stream) {
+ debug('emit readable');
+ stream.emit('readable');
+ flow(stream);
}
-function resolveIfSymlink (maybeSymlinkPath, cb) {
- fs.lstat(maybeSymlinkPath, function (err, stat) {
- if (err) return cb.apply(this, arguments)
- if (!stat.isSymbolicLink()) return cb(null, maybeSymlinkPath)
- fs.readlink(maybeSymlinkPath, cb)
- })
+// at this point, the user has presumably seen the 'readable' event,
+// and called read() to consume some data. that may have triggered
+// in turn another _read(n) call, in which case reading = true if
+// it's in progress.
+// However, if we're not ended, or reading, and the length < hwm,
+// then go ahead and try to read some more preemptively.
+function maybeReadMore(stream, state) {
+ if (!state.readingMore) {
+ state.readingMore = true;
+ pna.nextTick(maybeReadMore_, stream, state);
+ }
}
-function ensureFromIsNotSource (from, to, cb) {
- resolveIfSymlink(from, function (err, fromDestination) {
- if (err) return cb.apply(this, arguments)
- if (path.resolve(path.dirname(from), fromDestination) === path.resolve(to)) {
- return cb(new Error('Link target resolves to the same directory as link source: ' + to))
- }
- cb.apply(this, arguments)
- })
+function maybeReadMore_(stream, state) {
+ var len = state.length;
+ while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
+ debug('maybeReadMore read 0');
+ stream.read(0);
+ if (len === state.length)
+ // didn't get any data, stop spinning.
+ break;else len = state.length;
+ }
+ state.readingMore = false;
}
-function link (from, to, opts, cb) {
- to = path.resolve(to)
- opts.base = path.dirname(to)
- var absTarget = path.resolve(opts.base, from)
- var relativeTarget = path.relative(opts.base, absTarget)
- var target = opts.absolute ? absTarget : relativeTarget
+// abstract method. to be overridden in specific implementation classes.
+// call cb(er, data) where data is <= n in length.
+// for virtual (non-string, non-buffer) streams, "length" is somewhat
+// arbitrary, and perhaps not very meaningful.
+Readable.prototype._read = function (n) {
+ this.emit('error', new Error('_read() is not implemented'));
+};
- const tasks = [
- [ensureFromIsNotSource, absTarget, to],
- [fs, 'stat', absTarget],
- [clobberLinkGently, from, to, opts],
- [mkdir, path.dirname(to)],
- [fs, 'symlink', target, to, 'junction']
- ]
+Readable.prototype.pipe = function (dest, pipeOpts) {
+ var src = this;
+ var state = this._readableState;
- if (chown.selfOwner.uid !== 0) {
- chain(tasks, cb)
- } else {
- inferOwner(to).then(owner => {
- tasks.push([chown, to, owner.uid, owner.gid])
- chain(tasks, cb)
- })
+ switch (state.pipesCount) {
+ case 0:
+ state.pipes = dest;
+ break;
+ case 1:
+ state.pipes = [state.pipes, dest];
+ break;
+ default:
+ state.pipes.push(dest);
+ break;
}
-}
+ state.pipesCount += 1;
+ debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
-exports._clobberLinkGently = clobberLinkGently
-function clobberLinkGently (from, to, opts, cb) {
- if (opts.currentExists === false) {
- // nothing to clobber!
- opts.log.silly('gently link', 'link does not already exist', {
- link: to,
- target: from
- })
- return cb()
- }
+ var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
- if (!opts.clobberLinkGently ||
- opts.force === true ||
- !opts.gently ||
- typeof opts.gently !== 'string') {
- opts.log.silly('gently link', 'deleting existing link forcefully', {
- link: to,
- target: from,
- force: opts.force,
- gently: opts.gently,
- clobberLinkGently: opts.clobberLinkGently
- })
- return rm(to, opts, cb)
- }
+ var endFn = doEnd ? onend : unpipe;
+ if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn);
- if (!opts.currentIsLink) {
- opts.log.verbose('gently link', 'cannot remove, not a link', to)
- // don't delete. it'll fail with EEXIST when it tries to symlink.
- return cb()
+ dest.on('unpipe', onunpipe);
+ function onunpipe(readable, unpipeInfo) {
+ debug('onunpipe');
+ if (readable === src) {
+ if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
+ unpipeInfo.hasUnpiped = true;
+ cleanup();
+ }
+ }
}
- if (opts.currentTarget.indexOf(opts.gently) === 0) {
- opts.log.silly('gently link', 'delete existing link', to)
- return rm(to, opts, cb)
- } else {
- opts.log.verbose('gently link', 'refusing to delete existing link', {
- link: to,
- currentTarget: opts.currentTarget,
- newTarget: from,
- gently: opts.gently
- })
- return cb()
+ function onend() {
+ debug('onend');
+ dest.end();
}
-}
-
-
-/***/ }),
-/* 274 */,
-/* 275 */,
-/* 276 */
-/***/ (function(__unusedmodule, exports) {
-"use strict";
+ // when the dest drains, it reduces the awaitDrain counter
+ // on the source. This would be more elegant with a .once()
+ // handler in flow(), but adding and removing repeatedly is
+ // too slow.
+ var ondrain = pipeOnDrain(src);
+ dest.on('drain', ondrain);
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-//# sourceMappingURL=Logger.js.map
+ var cleanedUp = false;
+ function cleanup() {
+ debug('cleanup');
+ // cleanup event handlers once the pipe is broken
+ dest.removeListener('close', onclose);
+ dest.removeListener('finish', onfinish);
+ dest.removeListener('drain', ondrain);
+ dest.removeListener('error', onerror);
+ dest.removeListener('unpipe', onunpipe);
+ src.removeListener('end', onend);
+ src.removeListener('end', unpipe);
+ src.removeListener('data', ondata);
-/***/ }),
-/* 277 */,
-/* 278 */
-/***/ (function(__unusedmodule, exports) {
+ cleanedUp = true;
-"use strict";
+ // if the reader is waiting for a drain event from this
+ // specific writer, then it would cause it to never start
+ // flowing again.
+ // So, if this is awaiting a drain, then we just call it now.
+ // If we don't know, then assume that we are waiting for one.
+ if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
+ }
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-//# sourceMappingURL=CorrelationContext.js.map
+ // If the user pushes more data while we're writing to dest then we'll end up
+ // in ondata again. However, we only want to increase awaitDrain once because
+ // dest will only emit one 'drain' event for the multiple writes.
+ // => Introduce a guard on increasing awaitDrain.
+ var increasedAwaitDrain = false;
+ src.on('data', ondata);
+ function ondata(chunk) {
+ debug('ondata');
+ increasedAwaitDrain = false;
+ var ret = dest.write(chunk);
+ if (false === ret && !increasedAwaitDrain) {
+ // If the user unpiped during `dest.write()`, it is possible
+ // to get stuck in a permanently paused state if that write
+ // also returned false.
+ // => Check whether `dest` is still a piping destination.
+ if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
+ debug('false write response, pause', src._readableState.awaitDrain);
+ src._readableState.awaitDrain++;
+ increasedAwaitDrain = true;
+ }
+ src.pause();
+ }
+ }
-/***/ }),
-/* 279 */
-/***/ (function(module) {
+ // if the dest has an error, then stop piping into it.
+ // however, don't suppress the throwing behavior for this.
+ function onerror(er) {
+ debug('onerror', er);
+ unpipe();
+ dest.removeListener('error', onerror);
+ if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
+ }
-"use strict";
+ // Make sure our error handler is attached before userland ones.
+ prependListener(dest, 'error', onerror);
+ // Both close and finish should trigger unpipe, but only once.
+ function onclose() {
+ dest.removeListener('finish', onfinish);
+ unpipe();
+ }
+ dest.once('close', onclose);
+ function onfinish() {
+ debug('onfinish');
+ dest.removeListener('close', onclose);
+ unpipe();
+ }
+ dest.once('finish', onfinish);
-module.exports = cacheKey
-function cacheKey (type, identifier) {
- return ['pacote', type, identifier].join(':')
-}
+ function unpipe() {
+ debug('unpipe');
+ src.unpipe(dest);
+ }
+ // tell the dest that it's being piped to
+ dest.emit('pipe', src);
-/***/ }),
-/* 280 */
-/***/ (function(module, exports) {
+ // start the flow if it hasn't been started already.
+ if (!state.flowing) {
+ debug('pipe resume');
+ src.resume();
+ }
-exports = module.exports = SemVer
+ return dest;
+};
-var debug
-/* istanbul ignore next */
-if (typeof process === 'object' &&
- process.env &&
- process.env.NODE_DEBUG &&
- /\bsemver\b/i.test(process.env.NODE_DEBUG)) {
- debug = function () {
- var args = Array.prototype.slice.call(arguments, 0)
- args.unshift('SEMVER')
- console.log.apply(console, args)
- }
-} else {
- debug = function () {}
+function pipeOnDrain(src) {
+ return function () {
+ var state = src._readableState;
+ debug('pipeOnDrain', state.awaitDrain);
+ if (state.awaitDrain) state.awaitDrain--;
+ if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
+ state.flowing = true;
+ flow(src);
+ }
+ };
}
-// Note: this is the semver.org version of the spec that it implements
-// Not necessarily the package version of this code.
-exports.SEMVER_SPEC_VERSION = '2.0.0'
+Readable.prototype.unpipe = function (dest) {
+ var state = this._readableState;
+ var unpipeInfo = { hasUnpiped: false };
-var MAX_LENGTH = 256
-var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
- /* istanbul ignore next */ 9007199254740991
+ // if we're not piping anywhere, then do nothing.
+ if (state.pipesCount === 0) return this;
-// Max safe segment length for coercion.
-var MAX_SAFE_COMPONENT_LENGTH = 16
+ // just one destination. most common case.
+ if (state.pipesCount === 1) {
+ // passed in one, but it's not the right one.
+ if (dest && dest !== state.pipes) return this;
-// The actual regexps go on exports.re
-var re = exports.re = []
-var src = exports.src = []
-var R = 0
+ if (!dest) dest = state.pipes;
-// The following Regular Expressions can be used for tokenizing,
-// validating, and parsing SemVer version strings.
+ // got a match.
+ state.pipes = null;
+ state.pipesCount = 0;
+ state.flowing = false;
+ if (dest) dest.emit('unpipe', this, unpipeInfo);
+ return this;
+ }
-// ## Numeric Identifier
-// A single `0`, or a non-zero digit followed by zero or more digits.
+ // slow case. multiple pipe destinations.
-var NUMERICIDENTIFIER = R++
-src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'
-var NUMERICIDENTIFIERLOOSE = R++
-src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'
+ if (!dest) {
+ // remove all.
+ var dests = state.pipes;
+ var len = state.pipesCount;
+ state.pipes = null;
+ state.pipesCount = 0;
+ state.flowing = false;
-// ## Non-numeric Identifier
-// Zero or more digits, followed by a letter or hyphen, and then zero or
-// more letters, digits, or hyphens.
+ for (var i = 0; i < len; i++) {
+ dests[i].emit('unpipe', this, unpipeInfo);
+ }return this;
+ }
-var NONNUMERICIDENTIFIER = R++
-src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
+ // try to find the right one.
+ var index = indexOf(state.pipes, dest);
+ if (index === -1) return this;
-// ## Main Version
-// Three dot-separated numeric identifiers.
+ state.pipes.splice(index, 1);
+ state.pipesCount -= 1;
+ if (state.pipesCount === 1) state.pipes = state.pipes[0];
-var MAINVERSION = R++
-src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' +
- '(' + src[NUMERICIDENTIFIER] + ')\\.' +
- '(' + src[NUMERICIDENTIFIER] + ')'
+ dest.emit('unpipe', this, unpipeInfo);
-var MAINVERSIONLOOSE = R++
-src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
- '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' +
- '(' + src[NUMERICIDENTIFIERLOOSE] + ')'
+ return this;
+};
-// ## Pre-release Version Identifier
-// A numeric identifier, or a non-numeric identifier.
+// set up data events if they are asked for
+// Ensure readable listeners eventually get something
+Readable.prototype.on = function (ev, fn) {
+ var res = Stream.prototype.on.call(this, ev, fn);
-var PRERELEASEIDENTIFIER = R++
-src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] +
- '|' + src[NONNUMERICIDENTIFIER] + ')'
+ if (ev === 'data') {
+ // Start flowing on next tick if stream isn't explicitly paused
+ if (this._readableState.flowing !== false) this.resume();
+ } else if (ev === 'readable') {
+ var state = this._readableState;
+ if (!state.endEmitted && !state.readableListening) {
+ state.readableListening = state.needReadable = true;
+ state.emittedReadable = false;
+ if (!state.reading) {
+ pna.nextTick(nReadingNextTick, this);
+ } else if (state.length) {
+ emitReadable(this);
+ }
+ }
+ }
-var PRERELEASEIDENTIFIERLOOSE = R++
-src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] +
- '|' + src[NONNUMERICIDENTIFIER] + ')'
+ return res;
+};
+Readable.prototype.addListener = Readable.prototype.on;
-// ## Pre-release Version
-// Hyphen, followed by one or more dot-separated pre-release version
-// identifiers.
-
-var PRERELEASE = R++
-src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] +
- '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'
-
-var PRERELEASELOOSE = R++
-src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
- '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'
-
-// ## Build Metadata Identifier
-// Any combination of digits, letters, or hyphens.
-
-var BUILDIDENTIFIER = R++
-src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
-
-// ## Build Metadata
-// Plus sign, followed by one or more period-separated build metadata
-// identifiers.
-
-var BUILD = R++
-src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] +
- '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'
-
-// ## Full Version String
-// A main version, followed optionally by a pre-release version and
-// build metadata.
-
-// Note that the only major, minor, patch, and pre-release sections of
-// the version string are capturing groups. The build metadata is not a
-// capturing group, because it should not ever be used in version
-// comparison.
+function nReadingNextTick(self) {
+ debug('readable nexttick read 0');
+ self.read(0);
+}
-var FULL = R++
-var FULLPLAIN = 'v?' + src[MAINVERSION] +
- src[PRERELEASE] + '?' +
- src[BUILD] + '?'
+// pause() and resume() are remnants of the legacy readable stream API
+// If the user uses them, then switch into old mode.
+Readable.prototype.resume = function () {
+ var state = this._readableState;
+ if (!state.flowing) {
+ debug('resume');
+ state.flowing = true;
+ resume(this, state);
+ }
+ return this;
+};
-src[FULL] = '^' + FULLPLAIN + '$'
+function resume(stream, state) {
+ if (!state.resumeScheduled) {
+ state.resumeScheduled = true;
+ pna.nextTick(resume_, stream, state);
+ }
+}
-// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
-// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
-// common in the npm registry.
-var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] +
- src[PRERELEASELOOSE] + '?' +
- src[BUILD] + '?'
+function resume_(stream, state) {
+ if (!state.reading) {
+ debug('resume read 0');
+ stream.read(0);
+ }
-var LOOSE = R++
-src[LOOSE] = '^' + LOOSEPLAIN + '$'
+ state.resumeScheduled = false;
+ state.awaitDrain = 0;
+ stream.emit('resume');
+ flow(stream);
+ if (state.flowing && !state.reading) stream.read(0);
+}
-var GTLT = R++
-src[GTLT] = '((?:<|>)?=?)'
+Readable.prototype.pause = function () {
+ debug('call pause flowing=%j', this._readableState.flowing);
+ if (false !== this._readableState.flowing) {
+ debug('pause');
+ this._readableState.flowing = false;
+ this.emit('pause');
+ }
+ return this;
+};
-// Something like "2.*" or "1.2.x".
-// Note that "x.x" is a valid xRange identifer, meaning "any version"
-// Only the first item is strictly required.
-var XRANGEIDENTIFIERLOOSE = R++
-src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'
-var XRANGEIDENTIFIER = R++
-src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'
+function flow(stream) {
+ var state = stream._readableState;
+ debug('flow', state.flowing);
+ while (state.flowing && stream.read() !== null) {}
+}
-var XRANGEPLAIN = R++
-src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' +
- '(?:' + src[PRERELEASE] + ')?' +
- src[BUILD] + '?' +
- ')?)?'
+// wrap an old-style stream as the async data source.
+// This is *not* part of the readable stream interface.
+// It is an ugly unfortunate mess of history.
+Readable.prototype.wrap = function (stream) {
+ var _this = this;
-var XRANGEPLAINLOOSE = R++
-src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' +
- '(?:' + src[PRERELEASELOOSE] + ')?' +
- src[BUILD] + '?' +
- ')?)?'
+ var state = this._readableState;
+ var paused = false;
-var XRANGE = R++
-src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'
-var XRANGELOOSE = R++
-src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'
+ stream.on('end', function () {
+ debug('wrapped end');
+ if (state.decoder && !state.ended) {
+ var chunk = state.decoder.end();
+ if (chunk && chunk.length) _this.push(chunk);
+ }
-// Coercion.
-// Extract anything that could conceivably be a part of a valid semver
-var COERCE = R++
-src[COERCE] = '(?:^|[^\\d])' +
- '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' +
- '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
- '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' +
- '(?:$|[^\\d])'
+ _this.push(null);
+ });
-// Tilde ranges.
-// Meaning is "reasonably at or greater than"
-var LONETILDE = R++
-src[LONETILDE] = '(?:~>?)'
+ stream.on('data', function (chunk) {
+ debug('wrapped data');
+ if (state.decoder) chunk = state.decoder.write(chunk);
-var TILDETRIM = R++
-src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'
-re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g')
-var tildeTrimReplace = '$1~'
+ // don't skip over falsy values in objectMode
+ if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
-var TILDE = R++
-src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'
-var TILDELOOSE = R++
-src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'
+ var ret = _this.push(chunk);
+ if (!ret) {
+ paused = true;
+ stream.pause();
+ }
+ });
-// Caret ranges.
-// Meaning is "at least and backwards compatible with"
-var LONECARET = R++
-src[LONECARET] = '(?:\\^)'
+ // proxy all the other methods.
+ // important when wrapping filters and duplexes.
+ for (var i in stream) {
+ if (this[i] === undefined && typeof stream[i] === 'function') {
+ this[i] = function (method) {
+ return function () {
+ return stream[method].apply(stream, arguments);
+ };
+ }(i);
+ }
+ }
-var CARETTRIM = R++
-src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'
-re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g')
-var caretTrimReplace = '$1^'
+ // proxy certain important events.
+ for (var n = 0; n < kProxyEvents.length; n++) {
+ stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
+ }
-var CARET = R++
-src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'
-var CARETLOOSE = R++
-src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'
+ // when we try to consume some more bytes, simply unpause the
+ // underlying stream.
+ this._read = function (n) {
+ debug('wrapped _read', n);
+ if (paused) {
+ paused = false;
+ stream.resume();
+ }
+ };
-// A simple gt/lt/eq thing, or just "" to indicate "any version"
-var COMPARATORLOOSE = R++
-src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'
-var COMPARATOR = R++
-src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'
+ return this;
+};
-// An expression to strip any whitespace between the gtlt and the thing
-// it modifies, so that `> 1.2.3` ==> `>1.2.3`
-var COMPARATORTRIM = R++
-src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] +
- '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'
+Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
+ // making it explicit this property is not enumerable
+ // because otherwise some prototype manipulation in
+ // userland will fail
+ enumerable: false,
+ get: function () {
+ return this._readableState.highWaterMark;
+ }
+});
-// this one has to use the /g flag
-re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g')
-var comparatorTrimReplace = '$1$2$3'
+// exposed for testing purposes only.
+Readable._fromList = fromList;
-// Something like `1.2.3 - 1.2.4`
-// Note that these all use the loose form, because they'll be
-// checked against either the strict or loose comparator form
-// later.
-var HYPHENRANGE = R++
-src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' +
- '\\s+-\\s+' +
- '(' + src[XRANGEPLAIN] + ')' +
- '\\s*$'
+// Pluck off n bytes from an array of buffers.
+// Length is the combined lengths of all the buffers in the list.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function fromList(n, state) {
+ // nothing buffered
+ if (state.length === 0) return null;
-var HYPHENRANGELOOSE = R++
-src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' +
- '\\s+-\\s+' +
- '(' + src[XRANGEPLAINLOOSE] + ')' +
- '\\s*$'
+ var ret;
+ if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
+ // read it all, truncate the list
+ if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);
+ state.buffer.clear();
+ } else {
+ // read part of list
+ ret = fromListPartial(n, state.buffer, state.decoder);
+ }
-// Star ranges basically just allow anything at all.
-var STAR = R++
-src[STAR] = '(<|>)?=?\\s*\\*'
+ return ret;
+}
-// Compile to actual regexp objects.
-// All are flag-free, unless they were created above with a flag.
-for (var i = 0; i < R; i++) {
- debug(i, src[i])
- if (!re[i]) {
- re[i] = new RegExp(src[i])
+// Extracts only enough buffered data to satisfy the amount requested.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function fromListPartial(n, list, hasStrings) {
+ var ret;
+ if (n < list.head.data.length) {
+ // slice is the same for buffers and strings
+ ret = list.head.data.slice(0, n);
+ list.head.data = list.head.data.slice(n);
+ } else if (n === list.head.data.length) {
+ // first chunk is a perfect match
+ ret = list.shift();
+ } else {
+ // result spans more than one buffer
+ ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);
}
+ return ret;
}
-exports.parse = parse
-function parse (version, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
+// Copies a specified amount of characters from the list of buffered data
+// chunks.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function copyFromBufferString(n, list) {
+ var p = list.head;
+ var c = 1;
+ var ret = p.data;
+ n -= ret.length;
+ while (p = p.next) {
+ var str = p.data;
+ var nb = n > str.length ? str.length : n;
+ if (nb === str.length) ret += str;else ret += str.slice(0, n);
+ n -= nb;
+ if (n === 0) {
+ if (nb === str.length) {
+ ++c;
+ if (p.next) list.head = p.next;else list.head = list.tail = null;
+ } else {
+ list.head = p;
+ p.data = str.slice(nb);
+ }
+ break;
}
+ ++c;
}
+ list.length -= c;
+ return ret;
+}
- if (version instanceof SemVer) {
- return version
+// Copies a specified amount of bytes from the list of buffered data chunks.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function copyFromBuffer(n, list) {
+ var ret = Buffer.allocUnsafe(n);
+ var p = list.head;
+ var c = 1;
+ p.data.copy(ret);
+ n -= p.data.length;
+ while (p = p.next) {
+ var buf = p.data;
+ var nb = n > buf.length ? buf.length : n;
+ buf.copy(ret, ret.length - n, 0, nb);
+ n -= nb;
+ if (n === 0) {
+ if (nb === buf.length) {
+ ++c;
+ if (p.next) list.head = p.next;else list.head = list.tail = null;
+ } else {
+ list.head = p;
+ p.data = buf.slice(nb);
+ }
+ break;
+ }
+ ++c;
}
+ list.length -= c;
+ return ret;
+}
- if (typeof version !== 'string') {
- return null
- }
+function endReadable(stream) {
+ var state = stream._readableState;
- if (version.length > MAX_LENGTH) {
- return null
- }
+ // If we get here before consuming all the bytes, then that is a
+ // bug in node. Should never happen.
+ if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream');
- var r = options.loose ? re[LOOSE] : re[FULL]
- if (!r.test(version)) {
- return null
+ if (!state.endEmitted) {
+ state.ended = true;
+ pna.nextTick(endReadableNT, state, stream);
}
+}
- try {
- return new SemVer(version, options)
- } catch (er) {
- return null
+function endReadableNT(state, stream) {
+ // Check that we didn't get one last unshift.
+ if (!state.endEmitted && state.length === 0) {
+ state.endEmitted = true;
+ stream.readable = false;
+ stream.emit('end');
}
}
-exports.valid = valid
-function valid (version, options) {
- var v = parse(version, options)
- return v ? v.version : null
+function indexOf(xs, x) {
+ for (var i = 0, l = xs.length; i < l; i++) {
+ if (xs[i] === x) return i;
+ }
+ return -1;
}
-exports.clean = clean
-function clean (version, options) {
- var s = parse(version.trim().replace(/^[=v]+/, ''), options)
- return s ? s.version : null
-}
+/***/ }),
+/* 227 */,
+/* 228 */,
+/* 229 */
+/***/ (function(__unusedmodule, exports) {
-exports.SemVer = SemVer
+"use strict";
-function SemVer (version, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
- if (version instanceof SemVer) {
- if (version.loose === options.loose) {
- return version
- } else {
- version = version.version
- }
- } else if (typeof version !== 'string') {
- throw new TypeError('Invalid Version: ' + version)
- }
- if (version.length > MAX_LENGTH) {
- throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters')
- }
+Object.defineProperty(exports, '__esModule', { value: true });
- if (!(this instanceof SemVer)) {
- return new SemVer(version, options)
- }
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+/**
+ * A static-key-based credential that supports updating
+ * the underlying key value.
+ */
+var AzureKeyCredential = /** @class */ (function () {
+ /**
+ * Create an instance of an AzureKeyCredential for use
+ * with a service client.
+ *
+ * @param key the initial value of the key to use in authentication
+ */
+ function AzureKeyCredential(key) {
+ if (!key) {
+ throw new Error("key must be a non-empty string");
+ }
+ this._key = key;
+ }
+ Object.defineProperty(AzureKeyCredential.prototype, "key", {
+ /**
+ * The value of the key to be used in authentication
+ */
+ get: function () {
+ return this._key;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ /**
+ * Change the value of the key.
+ *
+ * Updates will take effect upon the next request after
+ * updating the key value.
+ *
+ * @param newKey the new key value to be used
+ */
+ AzureKeyCredential.prototype.update = function (newKey) {
+ this._key = newKey;
+ };
+ return AzureKeyCredential;
+}());
- debug('SemVer', version, options)
- this.options = options
- this.loose = !!options.loose
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+/**
+ * Tests an object to determine whether it implements TokenCredential.
+ *
+ * @param credential The assumed TokenCredential to be tested.
+ */
+function isTokenCredential(credential) {
+ // Check for an object with a 'getToken' function and possibly with
+ // a 'signRequest' function. We do this check to make sure that
+ // a ServiceClientCredentials implementor (like TokenClientCredentials
+ // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if
+ // it doesn't actually implement TokenCredential also.
+ return (credential &&
+ typeof credential.getToken === "function" &&
+ (credential.signRequest === undefined || credential.getToken.length > 0));
+}
- var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL])
+exports.AzureKeyCredential = AzureKeyCredential;
+exports.isTokenCredential = isTokenCredential;
+//# sourceMappingURL=index.js.map
- if (!m) {
- throw new TypeError('Invalid Version: ' + version)
- }
- this.raw = version
+/***/ }),
+/* 230 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- // these are actually numbers
- this.major = +m[1]
- this.minor = +m[2]
- this.patch = +m[3]
+"use strict";
- if (this.major > MAX_SAFE_INTEGER || this.major < 0) {
- throw new TypeError('Invalid major version')
- }
- if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) {
- throw new TypeError('Invalid minor version')
- }
+module.exports = __webpack_require__(412)
- if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) {
- throw new TypeError('Invalid patch version')
- }
- // numberify any prerelease numeric ids
- if (!m[4]) {
- this.prerelease = []
- } else {
- this.prerelease = m[4].split('.').map(function (id) {
- if (/^[0-9]+$/.test(id)) {
- var num = +id
- if (num >= 0 && num < MAX_SAFE_INTEGER) {
- return num
- }
- }
- return id
- })
- }
+/***/ }),
+/* 231 */,
+/* 232 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- this.build = m[5] ? m[5].split('.') : []
- this.format()
-}
+"use strict";
-SemVer.prototype.format = function () {
- this.version = this.major + '.' + this.minor + '.' + this.patch
- if (this.prerelease.length) {
- this.version += '-' + this.prerelease.join('.')
- }
- return this.version
-}
-SemVer.prototype.toString = function () {
- return this.version
-}
+/**/
-SemVer.prototype.compare = function (other) {
- debug('SemVer.compare', this.version, this.options, other)
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
- }
+var pna = __webpack_require__(78);
+/**/
- return this.compareMain(other) || this.comparePre(other)
-}
+// undocumented cb() API, needed for core, not for public API
+function destroy(err, cb) {
+ var _this = this;
-SemVer.prototype.compareMain = function (other) {
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
+ var readableDestroyed = this._readableState && this._readableState.destroyed;
+ var writableDestroyed = this._writableState && this._writableState.destroyed;
+
+ if (readableDestroyed || writableDestroyed) {
+ if (cb) {
+ cb(err);
+ } else if (err && (!this._writableState || !this._writableState.errorEmitted)) {
+ pna.nextTick(emitErrorNT, this, err);
+ }
+ return this;
}
- return compareIdentifiers(this.major, other.major) ||
- compareIdentifiers(this.minor, other.minor) ||
- compareIdentifiers(this.patch, other.patch)
-}
+ // we set destroyed to true before firing error callbacks in order
+ // to make it re-entrance safe in case destroy() is called within callbacks
-SemVer.prototype.comparePre = function (other) {
- if (!(other instanceof SemVer)) {
- other = new SemVer(other, this.options)
+ if (this._readableState) {
+ this._readableState.destroyed = true;
}
- // NOT having a prerelease is > having one
- if (this.prerelease.length && !other.prerelease.length) {
- return -1
- } else if (!this.prerelease.length && other.prerelease.length) {
- return 1
- } else if (!this.prerelease.length && !other.prerelease.length) {
- return 0
+ // if this is a duplex stream mark the writable part as destroyed as well
+ if (this._writableState) {
+ this._writableState.destroyed = true;
}
- var i = 0
- do {
- var a = this.prerelease[i]
- var b = other.prerelease[i]
- debug('prerelease compare', i, a, b)
- if (a === undefined && b === undefined) {
- return 0
- } else if (b === undefined) {
- return 1
- } else if (a === undefined) {
- return -1
- } else if (a === b) {
- continue
- } else {
- return compareIdentifiers(a, b)
- }
- } while (++i)
-}
-
-// preminor will bump the version up to the next minor release, and immediately
-// down to pre-release. premajor and prepatch work the same way.
-SemVer.prototype.inc = function (release, identifier) {
- switch (release) {
- case 'premajor':
- this.prerelease.length = 0
- this.patch = 0
- this.minor = 0
- this.major++
- this.inc('pre', identifier)
- break
- case 'preminor':
- this.prerelease.length = 0
- this.patch = 0
- this.minor++
- this.inc('pre', identifier)
- break
- case 'prepatch':
- // If this is already a prerelease, it will bump to the next version
- // drop any prereleases that might already exist, since they are not
- // relevant at this point.
- this.prerelease.length = 0
- this.inc('patch', identifier)
- this.inc('pre', identifier)
- break
- // If the input is a non-prerelease version, this acts the same as
- // prepatch.
- case 'prerelease':
- if (this.prerelease.length === 0) {
- this.inc('patch', identifier)
- }
- this.inc('pre', identifier)
- break
-
- case 'major':
- // If this is a pre-major version, bump up to the same major version.
- // Otherwise increment major.
- // 1.0.0-5 bumps to 1.0.0
- // 1.1.0 bumps to 2.0.0
- if (this.minor !== 0 ||
- this.patch !== 0 ||
- this.prerelease.length === 0) {
- this.major++
- }
- this.minor = 0
- this.patch = 0
- this.prerelease = []
- break
- case 'minor':
- // If this is a pre-minor version, bump up to the same minor version.
- // Otherwise increment minor.
- // 1.2.0-5 bumps to 1.2.0
- // 1.2.1 bumps to 1.3.0
- if (this.patch !== 0 || this.prerelease.length === 0) {
- this.minor++
- }
- this.patch = 0
- this.prerelease = []
- break
- case 'patch':
- // If this is not a pre-release version, it will increment the patch.
- // If it is a pre-release it will bump up to the same patch version.
- // 1.2.0-5 patches to 1.2.0
- // 1.2.0 patches to 1.2.1
- if (this.prerelease.length === 0) {
- this.patch++
- }
- this.prerelease = []
- break
- // This probably shouldn't be used publicly.
- // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction.
- case 'pre':
- if (this.prerelease.length === 0) {
- this.prerelease = [0]
- } else {
- var i = this.prerelease.length
- while (--i >= 0) {
- if (typeof this.prerelease[i] === 'number') {
- this.prerelease[i]++
- i = -2
- }
- }
- if (i === -1) {
- // didn't increment anything
- this.prerelease.push(0)
- }
- }
- if (identifier) {
- // 1.2.0-beta.1 bumps to 1.2.0-beta.2,
- // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0
- if (this.prerelease[0] === identifier) {
- if (isNaN(this.prerelease[1])) {
- this.prerelease = [identifier, 0]
- }
- } else {
- this.prerelease = [identifier, 0]
- }
+ this._destroy(err || null, function (err) {
+ if (!cb && err) {
+ pna.nextTick(emitErrorNT, _this, err);
+ if (_this._writableState) {
+ _this._writableState.errorEmitted = true;
}
- break
+ } else if (cb) {
+ cb(err);
+ }
+ });
- default:
- throw new Error('invalid increment argument: ' + release)
- }
- this.format()
- this.raw = this.version
- return this
+ return this;
}
-exports.inc = inc
-function inc (version, release, loose, identifier) {
- if (typeof (loose) === 'string') {
- identifier = loose
- loose = undefined
+function undestroy() {
+ if (this._readableState) {
+ this._readableState.destroyed = false;
+ this._readableState.reading = false;
+ this._readableState.ended = false;
+ this._readableState.endEmitted = false;
}
- try {
- return new SemVer(version, loose).inc(release, identifier).version
- } catch (er) {
- return null
+ if (this._writableState) {
+ this._writableState.destroyed = false;
+ this._writableState.ended = false;
+ this._writableState.ending = false;
+ this._writableState.finished = false;
+ this._writableState.errorEmitted = false;
}
}
-exports.diff = diff
-function diff (version1, version2) {
- if (eq(version1, version2)) {
- return null
- } else {
- var v1 = parse(version1)
- var v2 = parse(version2)
- var prefix = ''
- if (v1.prerelease.length || v2.prerelease.length) {
- prefix = 'pre'
- var defaultResult = 'prerelease'
- }
- for (var key in v1) {
- if (key === 'major' || key === 'minor' || key === 'patch') {
- if (v1[key] !== v2[key]) {
- return prefix + key
- }
- }
- }
- return defaultResult // may be undefined
- }
+function emitErrorNT(self, err) {
+ self.emit('error', err);
}
-exports.compareIdentifiers = compareIdentifiers
-
-var numeric = /^[0-9]+$/
-function compareIdentifiers (a, b) {
- var anum = numeric.test(a)
- var bnum = numeric.test(b)
-
- if (anum && bnum) {
- a = +a
- b = +b
- }
+module.exports = {
+ destroy: destroy,
+ undestroy: undestroy
+};
- return a === b ? 0
- : (anum && !bnum) ? -1
- : (bnum && !anum) ? 1
- : a < b ? -1
- : 1
-}
+/***/ }),
+/* 233 */
+/***/ (function(module, exports, __webpack_require__) {
-exports.rcompareIdentifiers = rcompareIdentifiers
-function rcompareIdentifiers (a, b) {
- return compareIdentifiers(b, a)
-}
+/* eslint-disable node/no-deprecated-api */
+var buffer = __webpack_require__(293)
+var Buffer = buffer.Buffer
-exports.major = major
-function major (a, loose) {
- return new SemVer(a, loose).major
+// alternative to using Object.keys for old browsers
+function copyProps (src, dst) {
+ for (var key in src) {
+ dst[key] = src[key]
+ }
}
-
-exports.minor = minor
-function minor (a, loose) {
- return new SemVer(a, loose).minor
+if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
+ module.exports = buffer
+} else {
+ // Copy properties from require('buffer')
+ copyProps(buffer, exports)
+ exports.Buffer = SafeBuffer
}
-exports.patch = patch
-function patch (a, loose) {
- return new SemVer(a, loose).patch
+function SafeBuffer (arg, encodingOrOffset, length) {
+ return Buffer(arg, encodingOrOffset, length)
}
-exports.compare = compare
-function compare (a, b, loose) {
- return new SemVer(a, loose).compare(new SemVer(b, loose))
-}
+// Copy static methods from Buffer
+copyProps(Buffer, SafeBuffer)
-exports.compareLoose = compareLoose
-function compareLoose (a, b) {
- return compare(a, b, true)
+SafeBuffer.from = function (arg, encodingOrOffset, length) {
+ if (typeof arg === 'number') {
+ throw new TypeError('Argument must not be a number')
+ }
+ return Buffer(arg, encodingOrOffset, length)
}
-exports.rcompare = rcompare
-function rcompare (a, b, loose) {
- return compare(b, a, loose)
+SafeBuffer.alloc = function (size, fill, encoding) {
+ if (typeof size !== 'number') {
+ throw new TypeError('Argument must be a number')
+ }
+ var buf = Buffer(size)
+ if (fill !== undefined) {
+ if (typeof encoding === 'string') {
+ buf.fill(fill, encoding)
+ } else {
+ buf.fill(fill)
+ }
+ } else {
+ buf.fill(0)
+ }
+ return buf
}
-exports.sort = sort
-function sort (list, loose) {
- return list.sort(function (a, b) {
- return exports.compare(a, b, loose)
- })
+SafeBuffer.allocUnsafe = function (size) {
+ if (typeof size !== 'number') {
+ throw new TypeError('Argument must be a number')
+ }
+ return Buffer(size)
}
-exports.rsort = rsort
-function rsort (list, loose) {
- return list.sort(function (a, b) {
- return exports.rcompare(a, b, loose)
- })
+SafeBuffer.allocUnsafeSlow = function (size) {
+ if (typeof size !== 'number') {
+ throw new TypeError('Argument must be a number')
+ }
+ return buffer.SlowBuffer(size)
}
-exports.gt = gt
-function gt (a, b, loose) {
- return compare(a, b, loose) > 0
-}
-exports.lt = lt
-function lt (a, b, loose) {
- return compare(a, b, loose) < 0
-}
+/***/ }),
+/* 234 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-exports.eq = eq
-function eq (a, b, loose) {
- return compare(a, b, loose) === 0
-}
+"use strict";
-exports.neq = neq
-function neq (a, b, loose) {
- return compare(a, b, loose) !== 0
-}
+__webpack_require__(812);
+const inherits = __webpack_require__(669).inherits;
+const promisify = __webpack_require__(662);
+const EventEmitter = __webpack_require__(614).EventEmitter;
-exports.gte = gte
-function gte (a, b, loose) {
- return compare(a, b, loose) >= 0
-}
+module.exports = Agent;
-exports.lte = lte
-function lte (a, b, loose) {
- return compare(a, b, loose) <= 0
+function isAgent(v) {
+ return v && typeof v.addRequest === 'function';
}
-exports.cmp = cmp
-function cmp (a, op, b, loose) {
- switch (op) {
- case '===':
- if (typeof a === 'object')
- a = a.version
- if (typeof b === 'object')
- b = b.version
- return a === b
+/**
+ * Base `http.Agent` implementation.
+ * No pooling/keep-alive is implemented by default.
+ *
+ * @param {Function} callback
+ * @api public
+ */
+function Agent(callback, _opts) {
+ if (!(this instanceof Agent)) {
+ return new Agent(callback, _opts);
+ }
- case '!==':
- if (typeof a === 'object')
- a = a.version
- if (typeof b === 'object')
- b = b.version
- return a !== b
+ EventEmitter.call(this);
- case '':
- case '=':
- case '==':
- return eq(a, b, loose)
+ // The callback gets promisified if it has 3 parameters
+ // (i.e. it has a callback function) lazily
+ this._promisifiedCallback = false;
- case '!=':
- return neq(a, b, loose)
+ let opts = _opts;
+ if ('function' === typeof callback) {
+ this.callback = callback;
+ } else if (callback) {
+ opts = callback;
+ }
- case '>':
- return gt(a, b, loose)
+ // timeout for the socket to be returned from the callback
+ this.timeout = (opts && opts.timeout) || null;
- case '>=':
- return gte(a, b, loose)
+ this.options = opts;
+}
+inherits(Agent, EventEmitter);
- case '<':
- return lt(a, b, loose)
+/**
+ * Override this function in your subclass!
+ */
+Agent.prototype.callback = function callback(req, opts) {
+ throw new Error(
+ '"agent-base" has no default implementation, you must subclass and override `callback()`'
+ );
+};
- case '<=':
- return lte(a, b, loose)
+/**
+ * Called by node-core's "_http_client.js" module when creating
+ * a new HTTP request with this Agent instance.
+ *
+ * @api public
+ */
+Agent.prototype.addRequest = function addRequest(req, _opts) {
+ const ownOpts = Object.assign({}, _opts);
- default:
- throw new TypeError('Invalid operator: ' + op)
+ // Set default `host` for HTTP to localhost
+ if (null == ownOpts.host) {
+ ownOpts.host = 'localhost';
}
-}
-exports.Comparator = Comparator
-function Comparator (comp, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
+ // Set default `port` for HTTP if none was explicitly specified
+ if (null == ownOpts.port) {
+ ownOpts.port = ownOpts.secureEndpoint ? 443 : 80;
}
- if (comp instanceof Comparator) {
- if (comp.loose === !!options.loose) {
- return comp
- } else {
- comp = comp.value
- }
- }
+ const opts = Object.assign({}, this.options, ownOpts);
- if (!(this instanceof Comparator)) {
- return new Comparator(comp, options)
+ if (opts.host && opts.path) {
+ // If both a `host` and `path` are specified then it's most likely the
+ // result of a `url.parse()` call... we need to remove the `path` portion so
+ // that `net.connect()` doesn't attempt to open that as a unix socket file.
+ delete opts.path;
}
- debug('comparator', comp, options)
- this.options = options
- this.loose = !!options.loose
- this.parse(comp)
+ delete opts.agent;
+ delete opts.hostname;
+ delete opts._defaultAgent;
+ delete opts.defaultPort;
+ delete opts.createConnection;
- if (this.semver === ANY) {
- this.value = ''
- } else {
- this.value = this.operator + this.semver.version
- }
+ // Hint to use "Connection: close"
+ // XXX: non-documented `http` module API :(
+ req._last = true;
+ req.shouldKeepAlive = false;
- debug('comp', this)
-}
-
-var ANY = {}
-Comparator.prototype.parse = function (comp) {
- var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
- var m = comp.match(r)
+ // Create the `stream.Duplex` instance
+ let timeout;
+ let timedOut = false;
+ const timeoutMs = this.timeout;
+ const freeSocket = this.freeSocket;
- if (!m) {
- throw new TypeError('Invalid comparator: ' + comp)
+ function onerror(err) {
+ if (req._hadError) return;
+ req.emit('error', err);
+ // For Safety. Some additional errors might fire later on
+ // and we need to make sure we don't double-fire the error event.
+ req._hadError = true;
}
- this.operator = m[1]
- if (this.operator === '=') {
- this.operator = ''
+ function ontimeout() {
+ timeout = null;
+ timedOut = true;
+ const err = new Error(
+ 'A "socket" was not created for HTTP request before ' + timeoutMs + 'ms'
+ );
+ err.code = 'ETIMEOUT';
+ onerror(err);
}
- // if it literally is just '>' or '' then allow anything.
- if (!m[2]) {
- this.semver = ANY
- } else {
- this.semver = new SemVer(m[2], this.options.loose)
+ function callbackError(err) {
+ if (timedOut) return;
+ if (timeout != null) {
+ clearTimeout(timeout);
+ timeout = null;
+ }
+ onerror(err);
}
-}
-Comparator.prototype.toString = function () {
- return this.value
-}
+ function onsocket(socket) {
+ if (timedOut) return;
+ if (timeout != null) {
+ clearTimeout(timeout);
+ timeout = null;
+ }
+ if (isAgent(socket)) {
+ // `socket` is actually an http.Agent instance, so relinquish
+ // responsibility for this `req` to the Agent from here on
+ socket.addRequest(req, opts);
+ } else if (socket) {
+ function onfree() {
+ freeSocket(socket, opts);
+ }
+ socket.on('free', onfree);
+ req.onSocket(socket);
+ } else {
+ const err = new Error(
+ 'no Duplex stream was returned to agent-base for `' + req.method + ' ' + req.path + '`'
+ );
+ onerror(err);
+ }
+ }
-Comparator.prototype.test = function (version) {
- debug('Comparator.test', version, this.options.loose)
+ if (!this._promisifiedCallback && this.callback.length >= 3) {
+ // Legacy callback function - convert to a Promise
+ this.callback = promisify(this.callback, this);
+ this._promisifiedCallback = true;
+ }
- if (this.semver === ANY) {
- return true
+ if (timeoutMs > 0) {
+ timeout = setTimeout(ontimeout, timeoutMs);
}
- if (typeof version === 'string') {
- version = new SemVer(version, this.options)
+ try {
+ Promise.resolve(this.callback(req, opts)).then(onsocket, callbackError);
+ } catch (err) {
+ Promise.reject(err).catch(callbackError);
}
+};
- return cmp(version, this.operator, this.semver, this.options)
-}
+Agent.prototype.freeSocket = function freeSocket(socket, opts) {
+ // TODO reuse sockets
+ socket.destroy();
+};
-Comparator.prototype.intersects = function (comp, options) {
- if (!(comp instanceof Comparator)) {
- throw new TypeError('a Comparator is required')
- }
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
+/***/ }),
+/* 235 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- var rangeTmp
+"use strict";
- if (this.operator === '') {
- rangeTmp = new Range(comp.value, options)
- return satisfies(this.value, rangeTmp, options)
- } else if (comp.operator === '') {
- rangeTmp = new Range(this.value, options)
- return satisfies(comp.semver, rangeTmp, options)
+var util = __webpack_require__(669)
+var stream = __webpack_require__(914)
+var delegate = __webpack_require__(967)
+var Tracker = __webpack_require__(623)
+
+var TrackerStream = module.exports = function (name, size, options) {
+ stream.Transform.call(this, options)
+ this.tracker = new Tracker(name, size)
+ this.name = name
+ this.id = this.tracker.id
+ this.tracker.on('change', delegateChange(this))
+}
+util.inherits(TrackerStream, stream.Transform)
+
+function delegateChange (trackerStream) {
+ return function (name, completion, tracker) {
+ trackerStream.emit('change', name, completion, trackerStream)
}
+}
- var sameDirectionIncreasing =
- (this.operator === '>=' || this.operator === '>') &&
- (comp.operator === '>=' || comp.operator === '>')
- var sameDirectionDecreasing =
- (this.operator === '<=' || this.operator === '<') &&
- (comp.operator === '<=' || comp.operator === '<')
- var sameSemVer = this.semver.version === comp.semver.version
- var differentDirectionsInclusive =
- (this.operator === '>=' || this.operator === '<=') &&
- (comp.operator === '>=' || comp.operator === '<=')
- var oppositeDirectionsLessThan =
- cmp(this.semver, '<', comp.semver, options) &&
- ((this.operator === '>=' || this.operator === '>') &&
- (comp.operator === '<=' || comp.operator === '<'))
- var oppositeDirectionsGreaterThan =
- cmp(this.semver, '>', comp.semver, options) &&
- ((this.operator === '<=' || this.operator === '<') &&
- (comp.operator === '>=' || comp.operator === '>'))
+TrackerStream.prototype._transform = function (data, encoding, cb) {
+ this.tracker.completeWork(data.length ? data.length : 1)
+ this.push(data)
+ cb()
+}
- return sameDirectionIncreasing || sameDirectionDecreasing ||
- (sameSemVer && differentDirectionsInclusive) ||
- oppositeDirectionsLessThan || oppositeDirectionsGreaterThan
+TrackerStream.prototype._flush = function (cb) {
+ this.tracker.finish()
+ cb()
}
-exports.Range = Range
-function Range (range, options) {
- if (!options || typeof options !== 'object') {
- options = {
- loose: !!options,
- includePrerelease: false
- }
- }
+delegate(TrackerStream.prototype, 'tracker')
+ .method('completed')
+ .method('addWork')
+ .method('finish')
- if (range instanceof Range) {
- if (range.loose === !!options.loose &&
- range.includePrerelease === !!options.includePrerelease) {
- return range
- } else {
- return new Range(range.raw, options)
- }
- }
- if (range instanceof Comparator) {
- return new Range(range.value, options)
- }
+/***/ }),
+/* 236 */,
+/* 237 */
+/***/ (function(module) {
- if (!(this instanceof Range)) {
- return new Range(range, options)
- }
+module.exports = {"name":"make-fetch-happen","version":"5.0.2","description":"Opinionated, caching, retrying fetch client","main":"index.js","files":["*.js","lib"],"scripts":{"prerelease":"npm t","release":"standard-version -s","postrelease":"npm publish --tag=legacy && git push --follow-tags","pretest":"standard","test":"tap --coverage --nyc-arg=--all --timeout=35 -J test/*.js","update-coc":"weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'","update-contrib":"weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"},"repository":"https://github.com/zkat/make-fetch-happen","keywords":["http","request","fetch","mean girls","caching","cache","subresource integrity"],"author":{"name":"Kat Marchán","email":"kzm@zkat.tech","twitter":"maybekatz"},"license":"ISC","dependencies":{"agentkeepalive":"^3.4.1","cacache":"^12.0.0","http-cache-semantics":"^3.8.1","http-proxy-agent":"^2.1.0","https-proxy-agent":"^2.2.3","lru-cache":"^5.1.1","mississippi":"^3.0.0","node-fetch-npm":"^2.0.2","promise-retry":"^1.1.1","socks-proxy-agent":"^4.0.0","ssri":"^6.0.0"},"devDependencies":{"bluebird":"^3.5.1","mkdirp":"^0.5.1","nock":"^9.2.3","npmlog":"^4.1.2","require-inject":"^1.4.2","rimraf":"^2.6.2","safe-buffer":"^5.1.1","standard":"^11.0.1","standard-version":"^4.3.0","tacks":"^1.2.6","tap":"^12.7.0","weallbehave":"^1.0.0","weallcontribute":"^1.0.7"}};
- this.options = options
- this.loose = !!options.loose
- this.includePrerelease = !!options.includePrerelease
+/***/ }),
+/* 238 */
+/***/ (function(__unusedmodule, exports) {
- // First, split based on boolean or ||
- this.raw = range
- this.set = range.split(/\s*\|\|\s*/).map(function (range) {
- return this.parseRange(range.trim())
- }, this).filter(function (c) {
- // throw out any that are not relevant for whatever reason
- return c.length
- })
+"use strict";
- if (!this.set.length) {
- throw new TypeError('Invalid SemVer Range: ' + range)
- }
- this.format()
-}
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
+exports.default = _default;
-Range.prototype.format = function () {
- this.range = this.set.map(function (comps) {
- return comps.join(' ').trim()
- }).join('||').trim()
- return this.range
+/***/ }),
+/* 239 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+var validate = __webpack_require__(285)
+var renderTemplate = __webpack_require__(505)
+var wideTruncate = __webpack_require__(504)
+var stringWidth = __webpack_require__(66)
+
+module.exports = function (theme, width, completed) {
+ validate('ONN', [theme, width, completed])
+ if (completed < 0) completed = 0
+ if (completed > 1) completed = 1
+ if (width <= 0) return ''
+ var sofar = Math.round(width * completed)
+ var rest = width - sofar
+ var template = [
+ {type: 'complete', value: repeat(theme.complete, sofar), length: sofar},
+ {type: 'remaining', value: repeat(theme.remaining, rest), length: rest}
+ ]
+ return renderTemplate(width, template, theme)
}
-Range.prototype.toString = function () {
- return this.range
+// lodash's way of repeating
+function repeat (string, width) {
+ var result = ''
+ var n = width
+ do {
+ if (n % 2) {
+ result += string
+ }
+ n = Math.floor(n / 2)
+ /*eslint no-self-assign: 0*/
+ string += string
+ } while (n && stringWidth(result) < width)
+
+ return wideTruncate(result, width)
}
-Range.prototype.parseRange = function (range) {
- var loose = this.options.loose
- range = range.trim()
- // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
- var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]
- range = range.replace(hr, hyphenReplace)
- debug('hyphen replace', range)
- // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
- range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace)
- debug('comparator trim', range, re[COMPARATORTRIM])
- // `~ 1.2.3` => `~1.2.3`
- range = range.replace(re[TILDETRIM], tildeTrimReplace)
+/***/ }),
+/* 240 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- // `^ 1.2.3` => `^1.2.3`
- range = range.replace(re[CARETTRIM], caretTrimReplace)
+"use strict";
- // normalize spaces
- range = range.split(/\s+/).join(' ')
+const LRU = __webpack_require__(567)
+const url = __webpack_require__(835)
- // At this point, the range is completely trimmed and
- // ready to be split into comparators.
+let AGENT_CACHE = new LRU({ max: 50 })
+let HttpsAgent
+let HttpAgent
- var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]
- var set = range.split(' ').map(function (comp) {
- return parseComparator(comp, this.options)
- }, this).join(' ').split(/\s+/)
- if (this.options.loose) {
- // in loose mode, throw out any that are not valid comparators
- set = set.filter(function (comp) {
- return !!comp.match(compRe)
- })
+module.exports = getAgent
+
+function getAgent (uri, opts) {
+ const parsedUri = url.parse(typeof uri === 'string' ? uri : uri.url)
+ const isHttps = parsedUri.protocol === 'https:'
+ const pxuri = getProxyUri(uri, opts)
+
+ const key = [
+ `https:${isHttps}`,
+ pxuri
+ ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}`
+ : '>no-proxy<',
+ `local-address:${opts.localAddress || '>no-local-address<'}`,
+ `strict-ssl:${isHttps ? !!opts.strictSSL : '>no-strict-ssl<'}`,
+ `ca:${(isHttps && opts.ca) || '>no-ca<'}`,
+ `cert:${(isHttps && opts.cert) || '>no-cert<'}`,
+ `key:${(isHttps && opts.key) || '>no-key<'}`
+ ].join(':')
+
+ if (opts.agent != null) { // `agent: false` has special behavior!
+ return opts.agent
}
- set = set.map(function (comp) {
- return new Comparator(comp, this.options)
- }, this)
- return set
-}
+ if (AGENT_CACHE.peek(key)) {
+ return AGENT_CACHE.get(key)
+ }
-Range.prototype.intersects = function (range, options) {
- if (!(range instanceof Range)) {
- throw new TypeError('a Range is required')
+ if (pxuri) {
+ const proxy = getProxy(pxuri, opts, isHttps)
+ AGENT_CACHE.set(key, proxy)
+ return proxy
}
- return this.set.some(function (thisComparators) {
- return thisComparators.every(function (thisComparator) {
- return range.set.some(function (rangeComparators) {
- return rangeComparators.every(function (rangeComparator) {
- return thisComparator.intersects(rangeComparator, options)
- })
- })
- })
+ if (isHttps && !HttpsAgent) {
+ HttpsAgent = __webpack_require__(112).HttpsAgent
+ } else if (!isHttps && !HttpAgent) {
+ HttpAgent = __webpack_require__(112)
+ }
+
+ // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout
+ // of zero disables the timeout behavior (OS limits still apply). Else, if
+ // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that
+ // the node-fetch-npm timeout will always fire first, giving us more
+ // consistent errors.
+ const agentTimeout = opts.timeout === 0 ? 0 : opts.timeout + 1
+
+ const agent = isHttps ? new HttpsAgent({
+ maxSockets: opts.maxSockets || 15,
+ ca: opts.ca,
+ cert: opts.cert,
+ key: opts.key,
+ localAddress: opts.localAddress,
+ rejectUnauthorized: opts.strictSSL,
+ timeout: agentTimeout
+ }) : new HttpAgent({
+ maxSockets: opts.maxSockets || 15,
+ localAddress: opts.localAddress,
+ timeout: agentTimeout
})
+ AGENT_CACHE.set(key, agent)
+ return agent
}
-// Mostly just for testing and legacy API reasons
-exports.toComparators = toComparators
-function toComparators (range, options) {
- return new Range(range, options).set.map(function (comp) {
- return comp.map(function (c) {
- return c.value
- }).join(' ').trim().split(' ')
+function checkNoProxy (uri, opts) {
+ const host = url.parse(uri).hostname.split('.').reverse()
+ let noproxy = (opts.noProxy || getProcessEnv('no_proxy'))
+ if (typeof noproxy === 'string') {
+ noproxy = noproxy.split(/\s*,\s*/g)
+ }
+ return noproxy && noproxy.some(no => {
+ const noParts = no.split('.').filter(x => x).reverse()
+ if (!noParts.length) { return false }
+ for (let i = 0; i < noParts.length; i++) {
+ if (host[i] !== noParts[i]) {
+ return false
+ }
+ }
+ return true
})
}
-// comprised of xranges, tildes, stars, and gtlt's at this point.
-// already replaced the hyphen ranges
-// turn into a set of JUST comparators.
-function parseComparator (comp, options) {
- debug('comp', comp, options)
- comp = replaceCarets(comp, options)
- debug('caret', comp)
- comp = replaceTildes(comp, options)
- debug('tildes', comp)
- comp = replaceXRanges(comp, options)
- debug('xrange', comp)
- comp = replaceStars(comp, options)
- debug('stars', comp)
- return comp
-}
+module.exports.getProcessEnv = getProcessEnv
-function isX (id) {
- return !id || id.toLowerCase() === 'x' || id === '*'
+function getProcessEnv (env) {
+ if (!env) { return }
+
+ let value
+
+ if (Array.isArray(env)) {
+ for (let e of env) {
+ value = process.env[e] ||
+ process.env[e.toUpperCase()] ||
+ process.env[e.toLowerCase()]
+ if (typeof value !== 'undefined') { break }
+ }
+ }
+
+ if (typeof env === 'string') {
+ value = process.env[env] ||
+ process.env[env.toUpperCase()] ||
+ process.env[env.toLowerCase()]
+ }
+
+ return value
}
-// ~, ~> --> * (any, kinda silly)
-// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
-// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
-// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
-// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
-// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
-function replaceTildes (comp, options) {
- return comp.trim().split(/\s+/).map(function (comp) {
- return replaceTilde(comp, options)
- }).join(' ')
+function getProxyUri (uri, opts) {
+ const protocol = url.parse(uri).protocol
+
+ const proxy = opts.proxy || (
+ protocol === 'https:' && getProcessEnv('https_proxy')
+ ) || (
+ protocol === 'http:' && getProcessEnv(['https_proxy', 'http_proxy', 'proxy'])
+ )
+ if (!proxy) { return null }
+
+ const parsedProxy = (typeof proxy === 'string') ? url.parse(proxy) : proxy
+
+ return !checkNoProxy(uri, opts) && parsedProxy
}
-function replaceTilde (comp, options) {
- var r = options.loose ? re[TILDELOOSE] : re[TILDE]
- return comp.replace(r, function (_, M, m, p, pr) {
- debug('tilde', comp, _, M, m, p, pr)
- var ret
+let HttpProxyAgent
+let HttpsProxyAgent
+let SocksProxyAgent
+function getProxy (proxyUrl, opts, isHttps) {
+ let popts = {
+ host: proxyUrl.hostname,
+ port: proxyUrl.port,
+ protocol: proxyUrl.protocol,
+ path: proxyUrl.path,
+ auth: proxyUrl.auth,
+ ca: opts.ca,
+ cert: opts.cert,
+ key: opts.key,
+ timeout: opts.timeout === 0 ? 0 : opts.timeout + 1,
+ localAddress: opts.localAddress,
+ maxSockets: opts.maxSockets || 15,
+ rejectUnauthorized: opts.strictSSL
+ }
- if (isX(M)) {
- ret = ''
- } else if (isX(m)) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (isX(p)) {
- // ~1.2 == >=1.2.0 <1.3.0
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- } else if (pr) {
- debug('replaceTilde pr', pr)
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + (+m + 1) + '.0'
+ if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') {
+ if (!isHttps) {
+ if (!HttpProxyAgent) {
+ HttpProxyAgent = __webpack_require__(934)
+ }
+
+ return new HttpProxyAgent(popts)
} else {
- // ~1.2.3 == >=1.2.3 <1.3.0
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + (+m + 1) + '.0'
+ if (!HttpsProxyAgent) {
+ HttpsProxyAgent = __webpack_require__(717)
+ }
+
+ return new HttpsProxyAgent(popts)
+ }
+ }
+ if (proxyUrl.protocol.startsWith('socks')) {
+ if (!SocksProxyAgent) {
+ SocksProxyAgent = __webpack_require__(156)
}
- debug('tilde return', ret)
- return ret
- })
+ return new SocksProxyAgent(popts)
+ }
}
-// ^ --> * (any, kinda silly)
-// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0
-// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0
-// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0
-// ^1.2.3 --> >=1.2.3 <2.0.0
-// ^1.2.0 --> >=1.2.0 <2.0.0
-function replaceCarets (comp, options) {
- return comp.trim().split(/\s+/).map(function (comp) {
- return replaceCaret(comp, options)
- }).join(' ')
-}
-
-function replaceCaret (comp, options) {
- debug('caret', comp, options)
- var r = options.loose ? re[CARETLOOSE] : re[CARET]
- return comp.replace(r, function (_, M, m, p, pr) {
- debug('caret', comp, _, M, m, p, pr)
- var ret
- if (isX(M)) {
- ret = ''
- } else if (isX(m)) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (isX(p)) {
- if (M === '0') {
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- } else {
- ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'
- }
- } else if (pr) {
- debug('replaceCaret pr', pr)
- if (M === '0') {
- if (m === '0') {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + m + '.' + (+p + 1)
- } else {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + M + '.' + (+m + 1) + '.0'
- }
- } else {
- ret = '>=' + M + '.' + m + '.' + p + '-' + pr +
- ' <' + (+M + 1) + '.0.0'
- }
- } else {
- debug('no pr')
- if (M === '0') {
- if (m === '0') {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + m + '.' + (+p + 1)
- } else {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + M + '.' + (+m + 1) + '.0'
- }
- } else {
- ret = '>=' + M + '.' + m + '.' + p +
- ' <' + (+M + 1) + '.0.0'
- }
- }
+/***/ }),
+/* 241 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
- debug('caret return', ret)
- return ret
- })
-}
+"use strict";
-function replaceXRanges (comp, options) {
- debug('replaceXRanges', comp, options)
- return comp.split(/\s+/).map(function (comp) {
- return replaceXRange(comp, options)
- }).join(' ')
-}
-function replaceXRange (comp, options) {
- comp = comp.trim()
- var r = options.loose ? re[XRANGELOOSE] : re[XRANGE]
- return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
- debug('xRange', comp, ret, gtlt, M, m, p, pr)
- var xM = isX(M)
- var xm = xM || isX(m)
- var xp = xm || isX(p)
- var anyX = xp
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = _default;
+exports.URL = exports.DNS = void 0;
- if (gtlt === '=' && anyX) {
- gtlt = ''
- }
+var _stringify = _interopRequireDefault(__webpack_require__(855));
- if (xM) {
- if (gtlt === '>' || gtlt === '<') {
- // nothing is allowed
- ret = '<0.0.0'
- } else {
- // nothing is forbidden
- ret = '*'
- }
- } else if (gtlt && anyX) {
- // we know patch is an x, because we have any x at all.
- // replace X with 0
- if (xm) {
- m = 0
- }
- p = 0
+var _parse = _interopRequireDefault(__webpack_require__(197));
- if (gtlt === '>') {
- // >1 => >=2.0.0
- // >1.2 => >=1.3.0
- // >1.2.3 => >= 1.2.4
- gtlt = '>='
- if (xm) {
- M = +M + 1
- m = 0
- p = 0
- } else {
- m = +m + 1
- p = 0
- }
- } else if (gtlt === '<=') {
- // <=0.7.x is actually <0.8.0, since any 0.7.x should
- // pass. Similarly, <=7.x is actually <8.0.0, etc.
- gtlt = '<'
- if (xm) {
- M = +M + 1
- } else {
- m = +m + 1
- }
- }
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
- ret = gtlt + M + '.' + m + '.' + p
- } else if (xm) {
- ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'
- } else if (xp) {
- ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'
- }
+function stringToBytes(str) {
+ str = unescape(encodeURIComponent(str)); // UTF8 escape
- debug('xRange return', ret)
+ const bytes = [];
- return ret
- })
-}
+ for (let i = 0; i < str.length; ++i) {
+ bytes.push(str.charCodeAt(i));
+ }
-// Because * is AND-ed with everything else in the comparator,
-// and '' means "any version", just remove the *s entirely.
-function replaceStars (comp, options) {
- debug('replaceStars', comp, options)
- // Looseness is ignored here. star is always as loose as it gets!
- return comp.trim().replace(re[STAR], '')
+ return bytes;
}
-// This function is passed to string.replace(re[HYPHENRANGE])
-// M, m, patch, prerelease, build
-// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5
-// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do
-// 1.2 - 3.4 => >=1.2.0 <3.5.0
-function hyphenReplace ($0,
- from, fM, fm, fp, fpr, fb,
- to, tM, tm, tp, tpr, tb) {
- if (isX(fM)) {
- from = ''
- } else if (isX(fm)) {
- from = '>=' + fM + '.0.0'
- } else if (isX(fp)) {
- from = '>=' + fM + '.' + fm + '.0'
- } else {
- from = '>=' + from
- }
+const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
+exports.DNS = DNS;
+const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
+exports.URL = URL;
- if (isX(tM)) {
- to = ''
- } else if (isX(tm)) {
- to = '<' + (+tM + 1) + '.0.0'
- } else if (isX(tp)) {
- to = '<' + tM + '.' + (+tm + 1) + '.0'
- } else if (tpr) {
- to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr
- } else {
- to = '<=' + to
- }
+function _default(name, version, hashfunc) {
+ function generateUUID(value, namespace, buf, offset) {
+ if (typeof value === 'string') {
+ value = stringToBytes(value);
+ }
- return (from + ' ' + to).trim()
-}
+ if (typeof namespace === 'string') {
+ namespace = (0, _parse.default)(namespace);
+ }
-// if ANY of the sets match ALL of its comparators, then pass
-Range.prototype.test = function (version) {
- if (!version) {
- return false
- }
+ if (namespace.length !== 16) {
+ throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
+ } // Compute hash of namespace and value, Per 4.3
+ // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
+ // hashfunc([...namespace, ... value])`
- if (typeof version === 'string') {
- version = new SemVer(version, this.options)
- }
- for (var i = 0; i < this.set.length; i++) {
- if (testSet(this.set[i], version, this.options)) {
- return true
- }
- }
- return false
-}
+ let bytes = new Uint8Array(16 + value.length);
+ bytes.set(namespace);
+ bytes.set(value, namespace.length);
+ bytes = hashfunc(bytes);
+ bytes[6] = bytes[6] & 0x0f | version;
+ bytes[8] = bytes[8] & 0x3f | 0x80;
-function testSet (set, version, options) {
- for (var i = 0; i < set.length; i++) {
- if (!set[i].test(version)) {
- return false
- }
- }
+ if (buf) {
+ offset = offset || 0;
- if (version.prerelease.length && !options.includePrerelease) {
- // Find the set of versions that are allowed to have prereleases
- // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0
- // That should allow `1.2.3-pr.2` to pass.
- // However, `1.2.4-alpha.notready` should NOT be allowed,
- // even though it's within the range set by the comparators.
- for (i = 0; i < set.length; i++) {
- debug(set[i].semver)
- if (set[i].semver === ANY) {
- continue
+ for (let i = 0; i < 16; ++i) {
+ buf[offset + i] = bytes[i];
}
- if (set[i].semver.prerelease.length > 0) {
- var allowed = set[i].semver
- if (allowed.major === version.major &&
- allowed.minor === version.minor &&
- allowed.patch === version.patch) {
- return true
- }
- }
+ return buf;
}
- // Version has a -pre, but it's not one of the ones we like.
- return false
- }
+ return (0, _stringify.default)(bytes);
+ } // Function#name is not settable on some platforms (#270)
- return true
-}
-exports.satisfies = satisfies
-function satisfies (version, range, options) {
try {
- range = new Range(range, options)
- } catch (er) {
- return false
- }
- return range.test(version)
-}
+ generateUUID.name = name; // eslint-disable-next-line no-empty
+ } catch (err) {} // For CommonJS default export support
-exports.maxSatisfying = maxSatisfying
-function maxSatisfying (versions, range, options) {
- var max = null
- var maxSV = null
- try {
- var rangeObj = new Range(range, options)
- } catch (er) {
- return null
- }
- versions.forEach(function (v) {
- if (rangeObj.test(v)) {
- // satisfies(v, range, options)
- if (!max || maxSV.compare(v) === -1) {
- // compare(max, v, true)
- max = v
- maxSV = new SemVer(max, options)
- }
- }
- })
- return max
-}
-exports.minSatisfying = minSatisfying
-function minSatisfying (versions, range, options) {
- var min = null
- var minSV = null
- try {
- var rangeObj = new Range(range, options)
- } catch (er) {
- return null
- }
- versions.forEach(function (v) {
- if (rangeObj.test(v)) {
- // satisfies(v, range, options)
- if (!min || minSV.compare(v) === 1) {
- // compare(min, v, true)
- min = v
- minSV = new SemVer(min, options)
- }
- }
- })
- return min
+ generateUUID.DNS = DNS;
+ generateUUID.URL = URL;
+ return generateUUID;
}
-exports.minVersion = minVersion
-function minVersion (range, loose) {
- range = new Range(range, loose)
+/***/ }),
+/* 242 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
- var minver = new SemVer('0.0.0')
- if (range.test(minver)) {
- return minver
- }
+"use strict";
- minver = new SemVer('0.0.0-0')
- if (range.test(minver)) {
- return minver
- }
+Object.defineProperty(exports, "__esModule", { value: true });
+__webpack_require__(71);
- minver = null
- for (var i = 0; i < range.set.length; ++i) {
- var comparators = range.set[i]
- comparators.forEach(function (comparator) {
- // Clone to avoid manipulating the comparator's semver object.
- var compver = new SemVer(comparator.semver.version)
- switch (comparator.operator) {
- case '>':
- if (compver.prerelease.length === 0) {
- compver.patch++
- } else {
- compver.prerelease.push(0)
- }
- compver.raw = compver.format()
- /* fallthrough */
- case '':
- case '>=':
- if (!minver || gt(minver, compver)) {
- minver = compver
- }
- break
- case '<':
- case '<=':
- /* Ignore maximum versions */
- break
- /* istanbul ignore next */
- default:
- throw new Error('Unexpected operation: ' + comparator.operator)
- }
- })
- }
+/***/ }),
+/* 243 */
+/***/ (function(module) {
- if (minver && range.test(minver)) {
- return minver
- }
+"use strict";
- return null
-}
-exports.validRange = validRange
-function validRange (range, options) {
- try {
- // Return '*' instead of '' so that truthiness works.
- // This will throw if it's invalid anyway
- return new Range(range, options).range || '*'
- } catch (er) {
- return null
- }
-}
+module.exports = isWin32() || isColorTerm()
-// Determine if version is less than all the versions possible in the range
-exports.ltr = ltr
-function ltr (version, range, options) {
- return outside(version, range, '<', options)
+function isWin32 () {
+ return process.platform === 'win32'
}
-// Determine if version is greater than all the versions possible in the range.
-exports.gtr = gtr
-function gtr (version, range, options) {
- return outside(version, range, '>', options)
+function isColorTerm () {
+ var termHasColor = /^screen|^xterm|^vt100|color|ansi|cygwin|linux/i
+ return !!process.env.COLORTERM || termHasColor.test(process.env.TERM)
}
-exports.outside = outside
-function outside (version, range, hilo, options) {
- version = new SemVer(version, options)
- range = new Range(range, options)
-
- var gtfn, ltefn, ltfn, comp, ecomp
- switch (hilo) {
- case '>':
- gtfn = gt
- ltefn = lte
- ltfn = lt
- comp = '>'
- ecomp = '>='
- break
- case '<':
- gtfn = lt
- ltefn = gte
- ltfn = gt
- comp = '<'
- ecomp = '<='
- break
- default:
- throw new TypeError('Must provide a hilo val of "<" or ">"')
- }
-
- // If it satisifes the range it is not outside
- if (satisfies(version, range, options)) {
- return false
- }
- // From now on, variable terms are as if we're in "gtr" mode.
- // but note that everything is flipped for the "ltr" function.
+/***/ }),
+/* 244 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- for (var i = 0; i < range.set.length; ++i) {
- var comparators = range.set[i]
+"use strict";
- var high = null
- var low = null
- comparators.forEach(function (comparator) {
- if (comparator.semver === ANY) {
- comparator = new Comparator('>=0.0.0')
- }
- high = high || comparator
- low = low || comparator
- if (gtfn(comparator.semver, high.semver, options)) {
- high = comparator
- } else if (ltfn(comparator.semver, low.semver, options)) {
- low = comparator
- }
- })
+module.exports = __webpack_require__(442)
- // If the edge version comparator has a operator then our version
- // isn't outside it
- if (high.operator === comp || high.operator === ecomp) {
- return false
- }
- // If the lowest version comparator has an operator and our version
- // is less than it then it isn't higher than the range
- if ((!low.operator || low.operator === comp) &&
- ltefn(version, low.semver)) {
- return false
- } else if (low.operator === ecomp && ltfn(version, low.semver)) {
- return false
- }
- }
- return true
-}
+/***/ }),
+/* 245 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
-exports.prerelease = prerelease
-function prerelease (version, options) {
- var parsed = parse(version, options)
- return (parsed && parsed.prerelease.length) ? parsed.prerelease : null
-}
+"use strict";
-exports.intersects = intersects
-function intersects (r1, r2, options) {
- r1 = new Range(r1, options)
- r2 = new Range(r2, options)
- return r1.intersects(r2)
-}
-exports.coerce = coerce
-function coerce (version) {
- if (version instanceof SemVer) {
- return version
- }
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
- if (typeof version !== 'string') {
- return null
- }
+var _crypto = _interopRequireDefault(__webpack_require__(417));
- var match = version.match(re[COERCE])
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
- if (match == null) {
- return null
+function md5(bytes) {
+ if (Array.isArray(bytes)) {
+ bytes = Buffer.from(bytes);
+ } else if (typeof bytes === 'string') {
+ bytes = Buffer.from(bytes, 'utf8');
}
- return parse(match[1] +
- '.' + (match[2] || '0') +
- '.' + (match[3] || '0'))
+ return _crypto.default.createHash('md5').update(bytes).digest();
}
+var _default = md5;
+exports.default = _default;
/***/ }),
-/* 281 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+/* 246 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const internal_globber_1 = __webpack_require__(297);
-/**
- * Constructs a globber
- *
- * @param patterns Patterns separated by newlines
- * @param options Glob options
- */
-function create(patterns, options) {
- return __awaiter(this, void 0, void 0, function* () {
- return yield internal_globber_1.DefaultGlobber.create(patterns, options);
- });
-}
-exports.create = create;
-//# sourceMappingURL=glob.js.map
+module.exports = function(Promise, INTERNAL, tryConvertToPromise,
+ apiRejection, Proxyable) {
+var util = __webpack_require__(248);
+var isArray = util.isArray;
-/***/ }),
-/* 282 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+function toResolutionValue(val) {
+ switch(val) {
+ case -2: return [];
+ case -3: return {};
+ case -6: return new Map();
+ }
+}
-"use strict";
+function PromiseArray(values) {
+ var promise = this._promise = new Promise(INTERNAL);
+ if (values instanceof Promise) {
+ promise._propagateFrom(values, 3);
+ values.suppressUnhandledRejections();
+ }
+ promise._setOnCancel(this);
+ this._values = values;
+ this._length = 0;
+ this._totalResolved = 0;
+ this._init(undefined, -2);
+}
+util.inherits(PromiseArray, Proxyable);
+PromiseArray.prototype.length = function () {
+ return this._length;
+};
-const figgyPudding = __webpack_require__(122)
-const getStream = __webpack_require__(145)
-const npmFetch = __webpack_require__(789)
+PromiseArray.prototype.promise = function () {
+ return this._promise;
+};
-const SearchOpts = figgyPudding({
- detailed: { default: false },
- limit: { default: 20 },
- from: { default: 0 },
- quality: { default: 0.65 },
- popularity: { default: 0.98 },
- maintenance: { default: 0.5 },
- sortBy: {}
-})
+PromiseArray.prototype._init = function init(_, resolveValueIfEmpty) {
+ var values = tryConvertToPromise(this._values, this._promise);
+ if (values instanceof Promise) {
+ values = values._target();
+ var bitField = values._bitField;
+ ;
+ this._values = values;
-module.exports = search
-function search (query, opts) {
- return getStream.array(search.stream(query, opts))
-}
-search.stream = searchStream
-function searchStream (query, opts) {
- opts = SearchOpts(opts)
- switch (opts.sortBy) {
- case 'optimal': {
- opts = opts.concat({
- quality: 0.65,
- popularity: 0.98,
- maintenance: 0.5
- })
- break
- }
- case 'quality': {
- opts = opts.concat({
- quality: 1,
- popularity: 0,
- maintenance: 0
- })
- break
+ if (((bitField & 50397184) === 0)) {
+ this._promise._setAsyncGuaranteed();
+ return values._then(
+ init,
+ this._reject,
+ undefined,
+ this,
+ resolveValueIfEmpty
+ );
+ } else if (((bitField & 33554432) !== 0)) {
+ values = values._value();
+ } else if (((bitField & 16777216) !== 0)) {
+ return this._reject(values._reason());
+ } else {
+ return this._cancel();
+ }
}
- case 'popularity': {
- opts = opts.concat({
- quality: 0,
- popularity: 1,
- maintenance: 0
- })
- break
+ values = util.asArray(values);
+ if (values === null) {
+ var err = apiRejection(
+ "expecting an array or an iterable object but got " + util.classString(values)).reason();
+ this._promise._rejectCallback(err, false);
+ return;
}
- case 'maintenance': {
- opts = opts.concat({
- quality: 0,
- popularity: 0,
- maintenance: 1
- })
- break
+
+ if (values.length === 0) {
+ if (resolveValueIfEmpty === -5) {
+ this._resolveEmptyArray();
+ }
+ else {
+ this._resolve(toResolutionValue(resolveValueIfEmpty));
+ }
+ return;
}
- }
- return npmFetch.json.stream('/-/v1/search', 'objects.*',
- opts.concat({
- query: {
- text: Array.isArray(query) ? query.join(' ') : query,
- size: opts.limit,
- from: opts.from,
- quality: opts.quality,
- popularity: opts.popularity,
- maintenance: opts.maintenance
- },
- mapJson (obj) {
- if (obj.package.date) {
- obj.package.date = new Date(obj.package.date)
+ this._iterate(values);
+};
+
+PromiseArray.prototype._iterate = function(values) {
+ var len = this.getActualLength(values.length);
+ this._length = len;
+ this._values = this.shouldCopyValues() ? new Array(len) : this._values;
+ var result = this._promise;
+ var isResolved = false;
+ var bitField = null;
+ for (var i = 0; i < len; ++i) {
+ var maybePromise = tryConvertToPromise(values[i], result);
+
+ if (maybePromise instanceof Promise) {
+ maybePromise = maybePromise._target();
+ bitField = maybePromise._bitField;
+ } else {
+ bitField = null;
}
- if (opts.detailed) {
- return obj
+
+ if (isResolved) {
+ if (bitField !== null) {
+ maybePromise.suppressUnhandledRejections();
+ }
+ } else if (bitField !== null) {
+ if (((bitField & 50397184) === 0)) {
+ maybePromise._proxy(this, i);
+ this._values[i] = maybePromise;
+ } else if (((bitField & 33554432) !== 0)) {
+ isResolved = this._promiseFulfilled(maybePromise._value(), i);
+ } else if (((bitField & 16777216) !== 0)) {
+ isResolved = this._promiseRejected(maybePromise._reason(), i);
+ } else {
+ isResolved = this._promiseCancelled(i);
+ }
} else {
- return obj.package
+ isResolved = this._promiseFulfilled(maybePromise, i);
}
- }
- })
- )
-}
+ }
+ if (!isResolved) result._setAsyncGuaranteed();
+};
+PromiseArray.prototype._isResolved = function () {
+ return this._values === null;
+};
-/***/ }),
-/* 283 */,
-/* 284 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+PromiseArray.prototype._resolve = function (value) {
+ this._values = null;
+ this._promise._fulfill(value);
+};
-var once = __webpack_require__(49)
-var eos = __webpack_require__(3)
-var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes
+PromiseArray.prototype._cancel = function() {
+ if (this._isResolved() || !this._promise._isCancellable()) return;
+ this._values = null;
+ this._promise._cancel();
+};
-var noop = function () {}
-var ancient = /^v?\.0/.test(process.version)
+PromiseArray.prototype._reject = function (reason) {
+ this._values = null;
+ this._promise._rejectCallback(reason, false);
+};
-var isFn = function (fn) {
- return typeof fn === 'function'
-}
+PromiseArray.prototype._promiseFulfilled = function (value, index) {
+ this._values[index] = value;
+ var totalResolved = ++this._totalResolved;
+ if (totalResolved >= this._length) {
+ this._resolve(this._values);
+ return true;
+ }
+ return false;
+};
-var isFS = function (stream) {
- if (!ancient) return false // newer node version do not need to care about fs is a special way
- if (!fs) return false // browser
- return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close)
-}
+PromiseArray.prototype._promiseCancelled = function() {
+ this._cancel();
+ return true;
+};
-var isRequest = function (stream) {
- return stream.setHeader && isFn(stream.abort)
-}
+PromiseArray.prototype._promiseRejected = function (reason) {
+ this._totalResolved++;
+ this._reject(reason);
+ return true;
+};
-var destroyer = function (stream, reading, writing, callback) {
- callback = once(callback)
+PromiseArray.prototype._resultCancelled = function() {
+ if (this._isResolved()) return;
+ var values = this._values;
+ this._cancel();
+ if (values instanceof Promise) {
+ values.cancel();
+ } else {
+ for (var i = 0; i < values.length; ++i) {
+ if (values[i] instanceof Promise) {
+ values[i].cancel();
+ }
+ }
+ }
+};
- var closed = false
- stream.on('close', function () {
- closed = true
- })
+PromiseArray.prototype.shouldCopyValues = function () {
+ return true;
+};
- eos(stream, {readable: reading, writable: writing}, function (err) {
- if (err) return callback(err)
- closed = true
- callback()
- })
+PromiseArray.prototype.getActualLength = function (len) {
+ return len;
+};
- var destroyed = false
- return function (err) {
- if (closed) return
- if (destroyed) return
- destroyed = true
+return PromiseArray;
+};
- if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks
- if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want
- if (isFn(stream.destroy)) return stream.destroy()
+/***/ }),
+/* 247 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- callback(err || new Error('stream was destroyed'))
- }
-}
+"use strict";
-var call = function (fn) {
- fn()
+const os = __webpack_require__(87);
+const tty = __webpack_require__(867);
+const hasFlag = __webpack_require__(364);
+
+const {env} = process;
+
+let forceColor;
+if (hasFlag('no-color') ||
+ hasFlag('no-colors') ||
+ hasFlag('color=false') ||
+ hasFlag('color=never')) {
+ forceColor = 0;
+} else if (hasFlag('color') ||
+ hasFlag('colors') ||
+ hasFlag('color=true') ||
+ hasFlag('color=always')) {
+ forceColor = 1;
}
-var pipe = function (from, to) {
- return from.pipe(to)
+if ('FORCE_COLOR' in env) {
+ if (env.FORCE_COLOR === 'true') {
+ forceColor = 1;
+ } else if (env.FORCE_COLOR === 'false') {
+ forceColor = 0;
+ } else {
+ forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3);
+ }
}
-var pump = function () {
- var streams = Array.prototype.slice.call(arguments)
- var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop
+function translateLevel(level) {
+ if (level === 0) {
+ return false;
+ }
- if (Array.isArray(streams[0])) streams = streams[0]
- if (streams.length < 2) throw new Error('pump requires two streams per minimum')
+ return {
+ level,
+ hasBasic: true,
+ has256: level >= 2,
+ has16m: level >= 3
+ };
+}
- var error
- var destroys = streams.map(function (stream, i) {
- var reading = i < streams.length - 1
- var writing = i > 0
- return destroyer(stream, reading, writing, function (err) {
- if (!error) error = err
- if (err) destroys.forEach(call)
- if (reading) return
- destroys.forEach(call)
- callback(error)
- })
- })
+function supportsColor(haveStream, streamIsTTY) {
+ if (forceColor === 0) {
+ return 0;
+ }
- return streams.reduce(pipe)
-}
+ if (hasFlag('color=16m') ||
+ hasFlag('color=full') ||
+ hasFlag('color=truecolor')) {
+ return 3;
+ }
-module.exports = pump
+ if (hasFlag('color=256')) {
+ return 2;
+ }
+ if (haveStream && !streamIsTTY && forceColor === undefined) {
+ return 0;
+ }
-/***/ }),
-/* 285 */
-/***/ (function(module) {
+ const min = forceColor || 0;
-"use strict";
+ if (env.TERM === 'dumb') {
+ return min;
+ }
+
+ if (process.platform === 'win32') {
+ // Windows 10 build 10586 is the first Windows release that supports 256 colors.
+ // Windows 10 build 14931 is the first release that supports 16m/TrueColor.
+ const osRelease = os.release().split('.');
+ if (
+ Number(osRelease[0]) >= 10 &&
+ Number(osRelease[2]) >= 10586
+ ) {
+ return Number(osRelease[2]) >= 14931 ? 3 : 2;
+ }
+ return 1;
+ }
-function isArguments (thingy) {
- return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee')
-}
+ if ('CI' in env) {
+ if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {
+ return 1;
+ }
-var types = {
- '*': {label: 'any', check: function () { return true }},
- A: {label: 'array', check: function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }},
- S: {label: 'string', check: function (thingy) { return typeof thingy === 'string' }},
- N: {label: 'number', check: function (thingy) { return typeof thingy === 'number' }},
- F: {label: 'function', check: function (thingy) { return typeof thingy === 'function' }},
- O: {label: 'object', check: function (thingy) { return typeof thingy === 'object' && thingy != null && !types.A.check(thingy) && !types.E.check(thingy) }},
- B: {label: 'boolean', check: function (thingy) { return typeof thingy === 'boolean' }},
- E: {label: 'error', check: function (thingy) { return thingy instanceof Error }},
- Z: {label: 'null', check: function (thingy) { return thingy == null }}
-}
+ return min;
+ }
-function addSchema (schema, arity) {
- var group = arity[schema.length] = arity[schema.length] || []
- if (group.indexOf(schema) === -1) group.push(schema)
-}
+ if ('TEAMCITY_VERSION' in env) {
+ return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;
+ }
-var validate = module.exports = function (rawSchemas, args) {
- if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length)
- if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas')
- if (!args) throw missingRequiredArg(1, 'args')
- if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas)
- if (!types.A.check(args)) throw invalidType(1, ['array'], args)
- var schemas = rawSchemas.split('|')
- var arity = {}
+ if (env.COLORTERM === 'truecolor') {
+ return 3;
+ }
- schemas.forEach(function (schema) {
- for (var ii = 0; ii < schema.length; ++ii) {
- var type = schema[ii]
- if (!types[type]) throw unknownType(ii, type)
- }
- if (/E.*E/.test(schema)) throw moreThanOneError(schema)
- addSchema(schema, arity)
- if (/E/.test(schema)) {
- addSchema(schema.replace(/E.*$/, 'E'), arity)
- addSchema(schema.replace(/E/, 'Z'), arity)
- if (schema.length === 1) addSchema('', arity)
- }
- })
- var matching = arity[args.length]
- if (!matching) {
- throw wrongNumberOfArgs(Object.keys(arity), args.length)
- }
- for (var ii = 0; ii < args.length; ++ii) {
- var newMatching = matching.filter(function (schema) {
- var type = schema[ii]
- var typeCheck = types[type].check
- return typeCheck(args[ii])
- })
- if (!newMatching.length) {
- var labels = matching.map(function (schema) {
- return types[schema[ii]].label
- }).filter(function (schema) { return schema != null })
- throw invalidType(ii, labels, args[ii])
- }
- matching = newMatching
- }
-}
+ if ('TERM_PROGRAM' in env) {
+ const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);
-function missingRequiredArg (num) {
- return newException('EMISSINGARG', 'Missing required argument #' + (num + 1))
-}
+ switch (env.TERM_PROGRAM) {
+ case 'iTerm.app':
+ return version >= 3 ? 3 : 2;
+ case 'Apple_Terminal':
+ return 2;
+ // No default
+ }
+ }
-function unknownType (num, type) {
- return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1))
-}
+ if (/-256(color)?$/i.test(env.TERM)) {
+ return 2;
+ }
-function invalidType (num, expectedTypes, value) {
- var valueType
- Object.keys(types).forEach(function (typeCode) {
- if (types[typeCode].check(value)) valueType = types[typeCode].label
- })
- return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' +
- englishList(expectedTypes) + ' but got ' + valueType)
-}
+ if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {
+ return 1;
+ }
-function englishList (list) {
- return list.join(', ').replace(/, ([^,]+)$/, ' or $1')
-}
+ if ('COLORTERM' in env) {
+ return 1;
+ }
-function wrongNumberOfArgs (expected, got) {
- var english = englishList(expected)
- var args = expected.every(function (ex) { return ex.length === 1 })
- ? 'argument'
- : 'arguments'
- return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got)
+ return min;
}
-function moreThanOneError (schema) {
- return newException('ETOOMANYERRORTYPES',
- 'Only one error type per argument signature is allowed, more than one found in "' + schema + '"')
+function getSupportLevel(stream) {
+ const level = supportsColor(stream, stream && stream.isTTY);
+ return translateLevel(level);
}
-function newException (code, msg) {
- var e = new Error(msg)
- e.code = code
- if (Error.captureStackTrace) Error.captureStackTrace(e, validate)
- return e
-}
+module.exports = {
+ supportsColor: getSupportLevel,
+ stdout: translateLevel(supportsColor(true, tty.isatty(1))),
+ stderr: translateLevel(supportsColor(true, tty.isatty(2)))
+};
/***/ }),
-/* 286 */
-/***/ (function(__unusedmodule, exports) {
+/* 248 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
+"use strict";
-// NOTE: These type checking functions intentionally don't use `instanceof`
-// because it is fragile and can be easily faked with `Object.create()`.
+var es5 = __webpack_require__(883);
+var canEvaluate = typeof navigator == "undefined";
-function isArray(arg) {
- if (Array.isArray) {
- return Array.isArray(arg);
- }
- return objectToString(arg) === '[object Array]';
-}
-exports.isArray = isArray;
+var errorObj = {e: {}};
+var tryCatchTarget;
+var globalObject = typeof self !== "undefined" ? self :
+ typeof window !== "undefined" ? window :
+ typeof global !== "undefined" ? global :
+ this !== undefined ? this : null;
-function isBoolean(arg) {
- return typeof arg === 'boolean';
+function tryCatcher() {
+ try {
+ var target = tryCatchTarget;
+ tryCatchTarget = null;
+ return target.apply(this, arguments);
+ } catch (e) {
+ errorObj.e = e;
+ return errorObj;
+ }
}
-exports.isBoolean = isBoolean;
-
-function isNull(arg) {
- return arg === null;
+function tryCatch(fn) {
+ tryCatchTarget = fn;
+ return tryCatcher;
}
-exports.isNull = isNull;
-function isNullOrUndefined(arg) {
- return arg == null;
-}
-exports.isNullOrUndefined = isNullOrUndefined;
+var inherits = function(Child, Parent) {
+ var hasProp = {}.hasOwnProperty;
-function isNumber(arg) {
- return typeof arg === 'number';
-}
-exports.isNumber = isNumber;
+ function T() {
+ this.constructor = Child;
+ this.constructor$ = Parent;
+ for (var propertyName in Parent.prototype) {
+ if (hasProp.call(Parent.prototype, propertyName) &&
+ propertyName.charAt(propertyName.length-1) !== "$"
+ ) {
+ this[propertyName + "$"] = Parent.prototype[propertyName];
+ }
+ }
+ }
+ T.prototype = Parent.prototype;
+ Child.prototype = new T();
+ return Child.prototype;
+};
-function isString(arg) {
- return typeof arg === 'string';
-}
-exports.isString = isString;
-function isSymbol(arg) {
- return typeof arg === 'symbol';
-}
-exports.isSymbol = isSymbol;
+function isPrimitive(val) {
+ return val == null || val === true || val === false ||
+ typeof val === "string" || typeof val === "number";
-function isUndefined(arg) {
- return arg === void 0;
}
-exports.isUndefined = isUndefined;
-function isRegExp(re) {
- return objectToString(re) === '[object RegExp]';
+function isObject(value) {
+ return typeof value === "function" ||
+ typeof value === "object" && value !== null;
}
-exports.isRegExp = isRegExp;
-function isObject(arg) {
- return typeof arg === 'object' && arg !== null;
-}
-exports.isObject = isObject;
+function maybeWrapAsError(maybeError) {
+ if (!isPrimitive(maybeError)) return maybeError;
-function isDate(d) {
- return objectToString(d) === '[object Date]';
+ return new Error(safeToString(maybeError));
}
-exports.isDate = isDate;
-function isError(e) {
- return (objectToString(e) === '[object Error]' || e instanceof Error);
+function withAppended(target, appendee) {
+ var len = target.length;
+ var ret = new Array(len + 1);
+ var i;
+ for (i = 0; i < len; ++i) {
+ ret[i] = target[i];
+ }
+ ret[i] = appendee;
+ return ret;
}
-exports.isError = isError;
-function isFunction(arg) {
- return typeof arg === 'function';
-}
-exports.isFunction = isFunction;
+function getDataPropertyOrDefault(obj, key, defaultValue) {
+ if (es5.isES5) {
+ var desc = Object.getOwnPropertyDescriptor(obj, key);
-function isPrimitive(arg) {
- return arg === null ||
- typeof arg === 'boolean' ||
- typeof arg === 'number' ||
- typeof arg === 'string' ||
- typeof arg === 'symbol' || // ES6 symbol
- typeof arg === 'undefined';
+ if (desc != null) {
+ return desc.get == null && desc.set == null
+ ? desc.value
+ : defaultValue;
+ }
+ } else {
+ return {}.hasOwnProperty.call(obj, key) ? obj[key] : undefined;
+ }
}
-exports.isPrimitive = isPrimitive;
-
-exports.isBuffer = Buffer.isBuffer;
-function objectToString(o) {
- return Object.prototype.toString.call(o);
+function notEnumerableProp(obj, name, value) {
+ if (isPrimitive(obj)) return obj;
+ var descriptor = {
+ value: value,
+ configurable: true,
+ enumerable: false,
+ writable: true
+ };
+ es5.defineProperty(obj, name, descriptor);
+ return obj;
}
-
-/***/ }),
-/* 287 */,
-/* 288 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-
-"use strict";
-
-
-// Update this array if you add/rename/remove files in this directory.
-// We support Browserify by skipping automatic module discovery and requiring modules directly.
-var modules = [
- __webpack_require__(162),
- __webpack_require__(640),
- __webpack_require__(797),
- __webpack_require__(645),
- __webpack_require__(877),
- __webpack_require__(762),
- __webpack_require__(28),
- __webpack_require__(189),
- __webpack_require__(92),
-];
-
-// Put all encoding/alias/codec definitions to single object and export it.
-for (var i = 0; i < modules.length; i++) {
- var module = modules[i];
- for (var enc in module)
- if (Object.prototype.hasOwnProperty.call(module, enc))
- exports[enc] = module[enc];
+function thrower(r) {
+ throw r;
}
+var inheritedDataKeys = (function() {
+ var excludedPrototypes = [
+ Array.prototype,
+ Object.prototype,
+ Function.prototype
+ ];
-/***/ }),
-/* 289 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-
-// tar -x
-const hlo = __webpack_require__(891)
-const Unpack = __webpack_require__(63)
-const fs = __webpack_require__(747)
-const fsm = __webpack_require__(827)
-const path = __webpack_require__(622)
-
-const x = module.exports = (opt_, files, cb) => {
- if (typeof opt_ === 'function')
- cb = opt_, files = null, opt_ = {}
- else if (Array.isArray(opt_))
- files = opt_, opt_ = {}
-
- if (typeof files === 'function')
- cb = files, files = null
+ var isExcludedProto = function(val) {
+ for (var i = 0; i < excludedPrototypes.length; ++i) {
+ if (excludedPrototypes[i] === val) {
+ return true;
+ }
+ }
+ return false;
+ };
- if (!files)
- files = []
- else
- files = Array.from(files)
+ if (es5.isES5) {
+ var getKeys = Object.getOwnPropertyNames;
+ return function(obj) {
+ var ret = [];
+ var visitedKeys = Object.create(null);
+ while (obj != null && !isExcludedProto(obj)) {
+ var keys;
+ try {
+ keys = getKeys(obj);
+ } catch (e) {
+ return ret;
+ }
+ for (var i = 0; i < keys.length; ++i) {
+ var key = keys[i];
+ if (visitedKeys[key]) continue;
+ visitedKeys[key] = true;
+ var desc = Object.getOwnPropertyDescriptor(obj, key);
+ if (desc != null && desc.get == null && desc.set == null) {
+ ret.push(key);
+ }
+ }
+ obj = es5.getPrototypeOf(obj);
+ }
+ return ret;
+ };
+ } else {
+ var hasProp = {}.hasOwnProperty;
+ return function(obj) {
+ if (isExcludedProto(obj)) return [];
+ var ret = [];
- const opt = hlo(opt_)
+ /*jshint forin:false */
+ enumeration: for (var key in obj) {
+ if (hasProp.call(obj, key)) {
+ ret.push(key);
+ } else {
+ for (var i = 0; i < excludedPrototypes.length; ++i) {
+ if (hasProp.call(excludedPrototypes[i], key)) {
+ continue enumeration;
+ }
+ }
+ ret.push(key);
+ }
+ }
+ return ret;
+ };
+ }
- if (opt.sync && typeof cb === 'function')
- throw new TypeError('callback not supported for sync tar functions')
+})();
- if (!opt.file && typeof cb === 'function')
- throw new TypeError('callback only supported with file option')
+var thisAssignmentPattern = /this\s*\.\s*\S+\s*=/;
+function isClass(fn) {
+ try {
+ if (typeof fn === "function") {
+ var keys = es5.names(fn.prototype);
- if (files.length)
- filesFilter(opt, files)
+ var hasMethods = es5.isES5 && keys.length > 1;
+ var hasMethodsOtherThanConstructor = keys.length > 0 &&
+ !(keys.length === 1 && keys[0] === "constructor");
+ var hasThisAssignmentAndStaticMethods =
+ thisAssignmentPattern.test(fn + "") && es5.names(fn).length > 0;
- return opt.file && opt.sync ? extractFileSync(opt)
- : opt.file ? extractFile(opt, cb)
- : opt.sync ? extractSync(opt)
- : extract(opt)
+ if (hasMethods || hasMethodsOtherThanConstructor ||
+ hasThisAssignmentAndStaticMethods) {
+ return true;
+ }
+ }
+ return false;
+ } catch (e) {
+ return false;
+ }
}
-// construct a filter that limits the file entries listed
-// include child entries if a dir is included
-const filesFilter = (opt, files) => {
- const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
- const filter = opt.filter
-
- const mapHas = (file, r) => {
- const root = r || path.parse(file).root || '.'
- const ret = file === root ? false
- : map.has(file) ? map.get(file)
- : mapHas(path.dirname(file), root)
-
- map.set(file, ret)
- return ret
- }
-
- opt.filter = filter
- ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
- : file => mapHas(file.replace(/\/+$/, ''))
+function toFastProperties(obj) {
+ /*jshint -W027,-W055,-W031*/
+ function FakeConstructor() {}
+ FakeConstructor.prototype = obj;
+ var receiver = new FakeConstructor();
+ function ic() {
+ return typeof receiver.foo;
+ }
+ ic();
+ ic();
+ return obj;
+ eval(obj);
}
-const extractFileSync = opt => {
- const u = new Unpack.Sync(opt)
-
- const file = opt.file
- let threw = true
- let fd
- const stat = fs.statSync(file)
- // This trades a zero-byte read() syscall for a stat
- // However, it will usually result in less memory allocation
- const readSize = opt.maxReadSize || 16*1024*1024
- const stream = new fsm.ReadStreamSync(file, {
- readSize: readSize,
- size: stat.size
- })
- stream.pipe(u)
+var rident = /^[a-z$_][a-z$_0-9]*$/i;
+function isIdentifier(str) {
+ return rident.test(str);
}
-const extractFile = (opt, cb) => {
- const u = new Unpack(opt)
- const readSize = opt.maxReadSize || 16*1024*1024
-
- const file = opt.file
- const p = new Promise((resolve, reject) => {
- u.on('error', reject)
- u.on('close', resolve)
+function filledRange(count, prefix, suffix) {
+ var ret = new Array(count);
+ for(var i = 0; i < count; ++i) {
+ ret[i] = prefix + i + suffix;
+ }
+ return ret;
+}
- // This trades a zero-byte read() syscall for a stat
- // However, it will usually result in less memory allocation
- fs.stat(file, (er, stat) => {
- if (er)
- reject(er)
- else {
- const stream = new fsm.ReadStream(file, {
- readSize: readSize,
- size: stat.size
- })
- stream.on('error', reject)
- stream.pipe(u)
- }
- })
- })
- return cb ? p.then(cb, cb) : p
+function safeToString(obj) {
+ try {
+ return obj + "";
+ } catch (e) {
+ return "[no string representation]";
+ }
}
-const extractSync = opt => {
- return new Unpack.Sync(opt)
+function isError(obj) {
+ return obj instanceof Error ||
+ (obj !== null &&
+ typeof obj === "object" &&
+ typeof obj.message === "string" &&
+ typeof obj.name === "string");
}
-const extract = opt => {
- return new Unpack(opt)
+function markAsOriginatingFromRejection(e) {
+ try {
+ notEnumerableProp(e, "isOperational", true);
+ }
+ catch(ignore) {}
}
+function originatesFromRejection(e) {
+ if (e == null) return false;
+ return ((e instanceof Error["__BluebirdErrorTypes__"].OperationalError) ||
+ e["isOperational"] === true);
+}
-/***/ }),
-/* 290 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+function canAttachTrace(obj) {
+ return isError(obj) && es5.propertyIsWritable(obj, "stack");
+}
-"use strict";
+var ensureErrorObject = (function() {
+ if (!("stack" in new Error())) {
+ return function(value) {
+ if (canAttachTrace(value)) return value;
+ try {throw new Error(safeToString(value));}
+ catch(err) {return err;}
+ };
+ } else {
+ return function(value) {
+ if (canAttachTrace(value)) return value;
+ return new Error(safeToString(value));
+ };
+ }
+})();
+function classString(obj) {
+ return {}.toString.call(obj);
+}
-module.exports = __webpack_require__(305)
+function copyDescriptors(from, to, filter) {
+ var keys = es5.names(from);
+ for (var i = 0; i < keys.length; ++i) {
+ var key = keys[i];
+ if (filter(key)) {
+ try {
+ es5.defineProperty(to, key, es5.getDescriptor(from, key));
+ } catch (ignore) {}
+ }
+ }
+}
+var asArray = function(v) {
+ if (es5.isArray(v)) {
+ return v;
+ }
+ return null;
+};
-/***/ }),
-/* 291 */,
-/* 292 */,
-/* 293 */
-/***/ (function(module) {
+if (typeof Symbol !== "undefined" && Symbol.iterator) {
+ var ArrayFrom = typeof Array.from === "function" ? function(v) {
+ return Array.from(v);
+ } : function(v) {
+ var ret = [];
+ var it = v[Symbol.iterator]();
+ var itResult;
+ while (!((itResult = it.next()).done)) {
+ ret.push(itResult.value);
+ }
+ return ret;
+ };
-module.exports = require("buffer");
+ asArray = function(v) {
+ if (es5.isArray(v)) {
+ return v;
+ } else if (v != null && typeof v[Symbol.iterator] === "function") {
+ return ArrayFrom(v);
+ }
+ return null;
+ };
+}
-/***/ }),
-/* 294 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+var isNode = typeof process !== "undefined" &&
+ classString(process).toLowerCase() === "[object process]";
-"use strict";
+var hasEnvVariables = typeof process !== "undefined" &&
+ typeof process.env !== "undefined";
-const fs = __webpack_require__(747);
+function env(key) {
+ return hasEnvVariables ? process.env[key] : undefined;
+}
-module.exports = fp => new Promise(resolve => {
- fs.access(fp, err => {
- resolve(!err);
- });
-});
-
-module.exports.sync = fp => {
- try {
- fs.accessSync(fp);
- return true;
- } catch (err) {
- return false;
- }
-};
+function getNativePromise() {
+ if (typeof Promise === "function") {
+ try {
+ var promise = new Promise(function(){});
+ if (classString(promise) === "[object Promise]") {
+ return Promise;
+ }
+ } catch (e) {}
+ }
+}
+var reflectHandler;
+function contextBind(ctx, cb) {
+ if (ctx === null ||
+ typeof cb !== "function" ||
+ cb === reflectHandler) {
+ return cb;
+ }
-/***/ }),
-/* 295 */,
-/* 296 */,
-/* 297 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+ if (ctx.domain !== null) {
+ cb = ctx.domain.bind(cb);
+ }
-"use strict";
+ var async = ctx.async;
+ if (async !== null) {
+ var old = cb;
+ cb = function() {
+ var $_len = arguments.length + 2;var args = new Array($_len); for(var $_i = 2; $_i < $_len ; ++$_i) {args[$_i] = arguments[$_i - 2];};
+ args[0] = old;
+ args[1] = this;
+ return async.runInAsyncScope.apply(async, args);
+ };
+ }
+ return cb;
+}
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-var __asyncValues = (this && this.__asyncValues) || function (o) {
- if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
- var m = o[Symbol.asyncIterator], i;
- return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
- function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
- function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
-};
-var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
-var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
- if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
- var g = generator.apply(thisArg, _arguments || []), i, q = [];
- return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
- function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
- function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
- function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
- function fulfill(value) { resume("next", value); }
- function reject(value) { resume("throw", value); }
- function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
+var ret = {
+ setReflectHandler: function(fn) {
+ reflectHandler = fn;
+ },
+ isClass: isClass,
+ isIdentifier: isIdentifier,
+ inheritedDataKeys: inheritedDataKeys,
+ getDataPropertyOrDefault: getDataPropertyOrDefault,
+ thrower: thrower,
+ isArray: es5.isArray,
+ asArray: asArray,
+ notEnumerableProp: notEnumerableProp,
+ isPrimitive: isPrimitive,
+ isObject: isObject,
+ isError: isError,
+ canEvaluate: canEvaluate,
+ errorObj: errorObj,
+ tryCatch: tryCatch,
+ inherits: inherits,
+ withAppended: withAppended,
+ maybeWrapAsError: maybeWrapAsError,
+ toFastProperties: toFastProperties,
+ filledRange: filledRange,
+ toString: safeToString,
+ canAttachTrace: canAttachTrace,
+ ensureErrorObject: ensureErrorObject,
+ originatesFromRejection: originatesFromRejection,
+ markAsOriginatingFromRejection: markAsOriginatingFromRejection,
+ classString: classString,
+ copyDescriptors: copyDescriptors,
+ isNode: isNode,
+ hasEnvVariables: hasEnvVariables,
+ env: env,
+ global: globalObject,
+ getNativePromise: getNativePromise,
+ contextBind: contextBind
};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __webpack_require__(470);
-const fs = __webpack_require__(747);
-const globOptionsHelper = __webpack_require__(601);
-const path = __webpack_require__(622);
-const patternHelper = __webpack_require__(597);
-const internal_match_kind_1 = __webpack_require__(327);
-const internal_pattern_1 = __webpack_require__(923);
-const internal_search_state_1 = __webpack_require__(728);
-const IS_WINDOWS = process.platform === 'win32';
-class DefaultGlobber {
- constructor(options) {
- this.patterns = [];
- this.searchPaths = [];
- this.options = globOptionsHelper.getOptions(options);
- }
- getSearchPaths() {
- // Return a copy
- return this.searchPaths.slice();
- }
- glob() {
- var e_1, _a;
- return __awaiter(this, void 0, void 0, function* () {
- const result = [];
- try {
- for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
- const itemPath = _c.value;
- result.push(itemPath);
- }
- }
- catch (e_1_1) { e_1 = { error: e_1_1 }; }
- finally {
- try {
- if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
- }
- finally { if (e_1) throw e_1.error; }
- }
- return result;
- });
- }
- globGenerator() {
- return __asyncGenerator(this, arguments, function* globGenerator_1() {
- // Fill in defaults options
- const options = globOptionsHelper.getOptions(this.options);
- // Implicit descendants?
- const patterns = [];
- for (const pattern of this.patterns) {
- patterns.push(pattern);
- if (options.implicitDescendants &&
- (pattern.trailingSeparator ||
- pattern.segments[pattern.segments.length - 1] !== '**')) {
- patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**')));
- }
- }
- // Push the search paths
- const stack = [];
- for (const searchPath of patternHelper.getSearchPaths(patterns)) {
- core.debug(`Search path '${searchPath}'`);
- // Exists?
- try {
- // Intentionally using lstat. Detection for broken symlink
- // will be performed later (if following symlinks).
- yield __await(fs.promises.lstat(searchPath));
- }
- catch (err) {
- if (err.code === 'ENOENT') {
- continue;
- }
- throw err;
- }
- stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));
- }
- // Search
- const traversalChain = []; // used to detect cycles
- while (stack.length) {
- // Pop
- const item = stack.pop();
- // Match?
- const match = patternHelper.match(patterns, item.path);
- const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);
- if (!match && !partialMatch) {
- continue;
- }
- // Stat
- const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain)
- // Broken symlink, or symlink cycle detected, or no longer exists
- );
- // Broken symlink, or symlink cycle detected, or no longer exists
- if (!stats) {
- continue;
- }
- // Directory
- if (stats.isDirectory()) {
- // Matched
- if (match & internal_match_kind_1.MatchKind.Directory) {
- yield yield __await(item.path);
- }
- // Descend?
- else if (!partialMatch) {
- continue;
- }
- // Push the child items in reverse
- const childLevel = item.level + 1;
- const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));
- stack.push(...childItems.reverse());
- }
- // File
- else if (match & internal_match_kind_1.MatchKind.File) {
- yield yield __await(item.path);
- }
- }
- });
- }
- /**
- * Constructs a DefaultGlobber
- */
- static create(patterns, options) {
- return __awaiter(this, void 0, void 0, function* () {
- const result = new DefaultGlobber(options);
- if (IS_WINDOWS) {
- patterns = patterns.replace(/\r\n/g, '\n');
- patterns = patterns.replace(/\r/g, '\n');
- }
- const lines = patterns.split('\n').map(x => x.trim());
- for (const line of lines) {
- // Empty or comment
- if (!line || line.startsWith('#')) {
- continue;
- }
- // Pattern
- else {
- result.patterns.push(new internal_pattern_1.Pattern(line));
- }
- }
- result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));
- return result;
- });
+ret.isRecentNode = ret.isNode && (function() {
+ var version;
+ if (process.versions && process.versions.node) {
+ version = process.versions.node.split(".").map(Number);
+ } else if (process.version) {
+ version = process.version.split(".").map(Number);
}
- static stat(item, options, traversalChain) {
- return __awaiter(this, void 0, void 0, function* () {
- // Note:
- // `stat` returns info about the target of a symlink (or symlink chain)
- // `lstat` returns info about a symlink itself
- let stats;
- if (options.followSymbolicLinks) {
- try {
- // Use `stat` (following symlinks)
- stats = yield fs.promises.stat(item.path);
- }
- catch (err) {
- if (err.code === 'ENOENT') {
- if (options.omitBrokenSymbolicLinks) {
- core.debug(`Broken symlink '${item.path}'`);
- return undefined;
- }
- throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`);
- }
- throw err;
- }
- }
- else {
- // Use `lstat` (not following symlinks)
- stats = yield fs.promises.lstat(item.path);
- }
- // Note, isDirectory() returns false for the lstat of a symlink
- if (stats.isDirectory() && options.followSymbolicLinks) {
- // Get the realpath
- const realPath = yield fs.promises.realpath(item.path);
- // Fixup the traversal chain to match the item level
- while (traversalChain.length >= item.level) {
- traversalChain.pop();
- }
- // Test for a cycle
- if (traversalChain.some((x) => x === realPath)) {
- core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
- return undefined;
- }
- // Update the traversal chain
- traversalChain.push(realPath);
- }
- return stats;
- });
+ return (version[0] === 0 && version[1] > 10) || (version[0] > 0);
+})();
+ret.nodeSupportsAsyncResource = ret.isNode && (function() {
+ var supportsAsync = false;
+ try {
+ var res = __webpack_require__(303).AsyncResource;
+ supportsAsync = typeof res.prototype.runInAsyncScope === "function";
+ } catch (e) {
+ supportsAsync = false;
}
-}
-exports.DefaultGlobber = DefaultGlobber;
-//# sourceMappingURL=internal-globber.js.map
+ return supportsAsync;
+})();
+
+if (ret.isNode) ret.toFastProperties(process);
+
+try {throw new Error(); } catch (e) {ret.lastLineError = e;}
+module.exports = ret;
+
/***/ }),
-/* 298 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+/* 249 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-Object.defineProperty(exports, "__esModule", {
- value: true
-});
-exports.default = void 0;
-
-var _v = _interopRequireDefault(__webpack_require__(241));
+const BB = __webpack_require__(900)
-var _md = _interopRequireDefault(__webpack_require__(245));
+const cacache = __webpack_require__(426)
+const cacheKey = __webpack_require__(279)
+const optCheck = __webpack_require__(420)
+const packlist = __webpack_require__(110)
+const pipe = BB.promisify(__webpack_require__(371).pipe)
+const tar = __webpack_require__(591)
-function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+module.exports = packDir
+function packDir (manifest, label, dir, target, opts) {
+ opts = optCheck(opts)
-const v3 = (0, _v.default)('v3', 0x30, _md.default);
-var _default = v3;
-exports.default = _default;
+ const packer = opts.dirPacker
+ ? BB.resolve(opts.dirPacker(manifest, dir))
+ : mkPacker(dir)
-/***/ }),
-/* 299 */
-/***/ (function(module) {
+ if (!opts.cache) {
+ return packer.then(packer => pipe(packer, target))
+ } else {
+ const cacher = cacache.put.stream(
+ opts.cache, cacheKey('packed-dir', label), opts
+ ).on('integrity', i => {
+ target.emit('integrity', i)
+ })
+ return packer.then(packer => BB.all([
+ pipe(packer, cacher),
+ pipe(packer, target)
+ ]))
+ }
+}
-module.exports = function (blocking) {
- [process.stdout, process.stderr].forEach(function (stream) {
- if (stream._handle && stream.isTTY && typeof stream._handle.setBlocking === 'function') {
- stream._handle.setBlocking(blocking)
- }
+function mkPacker (dir) {
+ return packlist({ path: dir }).then(files => {
+ return tar.c({
+ cwd: dir,
+ gzip: true,
+ portable: true,
+ prefix: 'package/'
+ }, files)
})
}
/***/ }),
-/* 300 */
-/***/ (function(module) {
+/* 250 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-// Generated by CoffeeScript 1.12.7
-(function() {
- var XMLNodeList;
+var constants = __webpack_require__(619)
- module.exports = XMLNodeList = (function() {
- function XMLNodeList(nodes) {
- this.nodes = nodes;
- }
+var origCwd = process.cwd
+var cwd = null
- Object.defineProperty(XMLNodeList.prototype, 'length', {
- get: function() {
- return this.nodes.length || 0;
- }
- });
+var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform
- XMLNodeList.prototype.clone = function() {
- return this.nodes = null;
- };
+process.cwd = function() {
+ if (!cwd)
+ cwd = origCwd.call(process)
+ return cwd
+}
+try {
+ process.cwd()
+} catch (er) {}
- XMLNodeList.prototype.item = function(index) {
- return this.nodes[index] || null;
- };
+var chdir = process.chdir
+process.chdir = function(d) {
+ cwd = null
+ chdir.call(process, d)
+}
- return XMLNodeList;
+module.exports = patch
- })();
+function patch (fs) {
+ // (re-)implement some things that are known busted or missing.
-}).call(this);
+ // lchmod, broken prior to 0.6.2
+ // back-port the fix here.
+ if (constants.hasOwnProperty('O_SYMLINK') &&
+ process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
+ patchLchmod(fs)
+ }
+ // lutimes implementation, or no-op
+ if (!fs.lutimes) {
+ patchLutimes(fs)
+ }
-/***/ }),
-/* 301 */,
-/* 302 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ // https://github.com/isaacs/node-graceful-fs/issues/4
+ // Chown should not fail on einval or eperm if non-root.
+ // It should not fail on enosys ever, as this just indicates
+ // that a fs doesn't support the intended operation.
-module.exports = realpath
-realpath.realpath = realpath
-realpath.sync = realpathSync
-realpath.realpathSync = realpathSync
-realpath.monkeypatch = monkeypatch
-realpath.unmonkeypatch = unmonkeypatch
+ fs.chown = chownFix(fs.chown)
+ fs.fchown = chownFix(fs.fchown)
+ fs.lchown = chownFix(fs.lchown)
-var fs = __webpack_require__(747)
-var origRealpath = fs.realpath
-var origRealpathSync = fs.realpathSync
+ fs.chmod = chmodFix(fs.chmod)
+ fs.fchmod = chmodFix(fs.fchmod)
+ fs.lchmod = chmodFix(fs.lchmod)
-var version = process.version
-var ok = /^v[0-5]\./.test(version)
-var old = __webpack_require__(117)
+ fs.chownSync = chownFixSync(fs.chownSync)
+ fs.fchownSync = chownFixSync(fs.fchownSync)
+ fs.lchownSync = chownFixSync(fs.lchownSync)
-function newError (er) {
- return er && er.syscall === 'realpath' && (
- er.code === 'ELOOP' ||
- er.code === 'ENOMEM' ||
- er.code === 'ENAMETOOLONG'
- )
-}
+ fs.chmodSync = chmodFixSync(fs.chmodSync)
+ fs.fchmodSync = chmodFixSync(fs.fchmodSync)
+ fs.lchmodSync = chmodFixSync(fs.lchmodSync)
-function realpath (p, cache, cb) {
- if (ok) {
- return origRealpath(p, cache, cb)
- }
+ fs.stat = statFix(fs.stat)
+ fs.fstat = statFix(fs.fstat)
+ fs.lstat = statFix(fs.lstat)
- if (typeof cache === 'function') {
- cb = cache
- cache = null
+ fs.statSync = statFixSync(fs.statSync)
+ fs.fstatSync = statFixSync(fs.fstatSync)
+ fs.lstatSync = statFixSync(fs.lstatSync)
+
+ // if lchmod/lchown do not exist, then make them no-ops
+ if (!fs.lchmod) {
+ fs.lchmod = function (path, mode, cb) {
+ if (cb) process.nextTick(cb)
+ }
+ fs.lchmodSync = function () {}
}
- origRealpath(p, cache, function (er, result) {
- if (newError(er)) {
- old.realpath(p, cache, cb)
- } else {
- cb(er, result)
+ if (!fs.lchown) {
+ fs.lchown = function (path, uid, gid, cb) {
+ if (cb) process.nextTick(cb)
}
- })
-}
-
-function realpathSync (p, cache) {
- if (ok) {
- return origRealpathSync(p, cache)
+ fs.lchownSync = function () {}
}
- try {
- return origRealpathSync(p, cache)
- } catch (er) {
- if (newError(er)) {
- return old.realpathSync(p, cache)
- } else {
- throw er
- }
- }
-}
-
-function monkeypatch () {
- fs.realpath = realpath
- fs.realpathSync = realpathSync
-}
-
-function unmonkeypatch () {
- fs.realpath = origRealpath
- fs.realpathSync = origRealpathSync
-}
-
-
-/***/ }),
-/* 303 */
-/***/ (function(module) {
-
-module.exports = require("async_hooks");
-
-/***/ }),
-/* 304 */
-/***/ (function(module) {
-
-module.exports = require("string_decoder");
-
-/***/ }),
-/* 305 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
+ // on Windows, A/V software can lock the directory, causing this
+ // to fail with an EACCES or EPERM if the directory contains newly
+ // created files. Try again on failure, for up to 60 seconds.
-const BB = __webpack_require__(489)
+ // Set the timeout this long because some Windows Anti-Virus, such as Parity
+ // bit9, may lock files for up to a minute, causing npm package install
+ // failures. Also, take care to yield the scheduler. Windows scheduling gives
+ // CPU to a busy looping process, which can cause the program causing the lock
+ // contention to be starved of CPU by node, so the contention doesn't resolve.
+ if (platform === "win32") {
+ fs.rename = (function (fs$rename) { return function (from, to, cb) {
+ var start = Date.now()
+ var backoff = 0;
+ fs$rename(from, to, function CB (er) {
+ if (er
+ && (er.code === "EACCES" || er.code === "EPERM")
+ && Date.now() - start < 60000) {
+ setTimeout(function() {
+ fs.stat(to, function (stater, st) {
+ if (stater && stater.code === "ENOENT")
+ fs$rename(from, to, CB);
+ else
+ cb(er)
+ })
+ }, backoff)
+ if (backoff < 100)
+ backoff += 10;
+ return;
+ }
+ if (cb) cb(er)
+ })
+ }})(fs.rename)
+ }
-const contentPath = __webpack_require__(969)
-const figgyPudding = __webpack_require__(122)
-const finished = BB.promisify(__webpack_require__(371).finished)
-const fixOwner = __webpack_require__(133)
-const fs = __webpack_require__(598)
-const glob = BB.promisify(__webpack_require__(402))
-const index = __webpack_require__(407)
-const path = __webpack_require__(622)
-const rimraf = BB.promisify(__webpack_require__(503))
-const ssri = __webpack_require__(951)
+ // if read() returns EAGAIN, then just try it again.
+ fs.read = (function (fs$read) {
+ function read (fd, buffer, offset, length, position, callback_) {
+ var callback
+ if (callback_ && typeof callback_ === 'function') {
+ var eagCounter = 0
+ callback = function (er, _, __) {
+ if (er && er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ return fs$read.call(fs, fd, buffer, offset, length, position, callback)
+ }
+ callback_.apply(this, arguments)
+ }
+ }
+ return fs$read.call(fs, fd, buffer, offset, length, position, callback)
+ }
-BB.promisifyAll(fs)
+ // This ensures `util.promisify` works as it does for native `fs.read`.
+ read.__proto__ = fs$read
+ return read
+ })(fs.read)
-const VerifyOpts = figgyPudding({
- concurrency: {
- default: 20
- },
- filter: {},
- log: {
- default: { silly () {} }
- }
-})
+ fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) {
+ var eagCounter = 0
+ while (true) {
+ try {
+ return fs$readSync.call(fs, fd, buffer, offset, length, position)
+ } catch (er) {
+ if (er.code === 'EAGAIN' && eagCounter < 10) {
+ eagCounter ++
+ continue
+ }
+ throw er
+ }
+ }
+ }})(fs.readSync)
-module.exports = verify
-function verify (cache, opts) {
- opts = VerifyOpts(opts)
- opts.log.silly('verify', 'verifying cache at', cache)
- return BB.reduce([
- markStartTime,
- fixPerms,
- garbageCollect,
- rebuildIndex,
- cleanTmp,
- writeVerifile,
- markEndTime
- ], (stats, step, i) => {
- const label = step.name || `step #${i}`
- const start = new Date()
- return BB.resolve(step(cache, opts)).then(s => {
- s && Object.keys(s).forEach(k => {
- stats[k] = s[k]
+ function patchLchmod (fs) {
+ fs.lchmod = function (path, mode, callback) {
+ fs.open( path
+ , constants.O_WRONLY | constants.O_SYMLINK
+ , mode
+ , function (err, fd) {
+ if (err) {
+ if (callback) callback(err)
+ return
+ }
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ fs.fchmod(fd, mode, function (err) {
+ fs.close(fd, function(err2) {
+ if (callback) callback(err || err2)
+ })
+ })
})
- const end = new Date()
- if (!stats.runTime) { stats.runTime = {} }
- stats.runTime[label] = end - start
- return stats
- })
- }, {}).tap(stats => {
- stats.runTime.total = stats.endTime - stats.startTime
- opts.log.silly('verify', 'verification finished for', cache, 'in', `${stats.runTime.total}ms`)
- })
-}
-
-function markStartTime (cache, opts) {
- return { startTime: new Date() }
-}
-
-function markEndTime (cache, opts) {
- return { endTime: new Date() }
-}
+ }
-function fixPerms (cache, opts) {
- opts.log.silly('verify', 'fixing cache permissions')
- return fixOwner.mkdirfix(cache, cache).then(() => {
- // TODO - fix file permissions too
- return fixOwner.chownr(cache, cache)
- }).then(() => null)
-}
+ fs.lchmodSync = function (path, mode) {
+ var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode)
-// Implements a naive mark-and-sweep tracing garbage collector.
-//
-// The algorithm is basically as follows:
-// 1. Read (and filter) all index entries ("pointers")
-// 2. Mark each integrity value as "live"
-// 3. Read entire filesystem tree in `content-vX/` dir
-// 4. If content is live, verify its checksum and delete it if it fails
-// 5. If content is not marked as live, rimraf it.
-//
-function garbageCollect (cache, opts) {
- opts.log.silly('verify', 'garbage collecting content')
- const indexStream = index.lsStream(cache)
- const liveContent = new Set()
- indexStream.on('data', entry => {
- if (opts.filter && !opts.filter(entry)) { return }
- liveContent.add(entry.integrity.toString())
- })
- return finished(indexStream).then(() => {
- const contentDir = contentPath._contentDir(cache)
- return glob(path.join(contentDir, '**'), {
- follow: false,
- nodir: true,
- nosort: true
- }).then(files => {
- return BB.resolve({
- verifiedContent: 0,
- reclaimedCount: 0,
- reclaimedSize: 0,
- badContentCount: 0,
- keptSize: 0
- }).tap((stats) => BB.map(files, (f) => {
- const split = f.split(/[/\\]/)
- const digest = split.slice(split.length - 3).join('')
- const algo = split[split.length - 4]
- const integrity = ssri.fromHex(digest, algo)
- if (liveContent.has(integrity.toString())) {
- return verifyContent(f, integrity).then(info => {
- if (!info.valid) {
- stats.reclaimedCount++
- stats.badContentCount++
- stats.reclaimedSize += info.size
- } else {
- stats.verifiedContent++
- stats.keptSize += info.size
- }
- return stats
- })
+ // prefer to return the chmod error, if one occurs,
+ // but still try to close, and report closing errors if they occur.
+ var threw = true
+ var ret
+ try {
+ ret = fs.fchmodSync(fd, mode)
+ threw = false
+ } finally {
+ if (threw) {
+ try {
+ fs.closeSync(fd)
+ } catch (er) {}
} else {
- // No entries refer to this content. We can delete.
- stats.reclaimedCount++
- return fs.statAsync(f).then(s => {
- return rimraf(f).then(() => {
- stats.reclaimedSize += s.size
- return stats
- })
- })
+ fs.closeSync(fd)
}
- }, { concurrency: opts.concurrency }))
- })
- })
-}
-
-function verifyContent (filepath, sri) {
- return fs.statAsync(filepath).then(stat => {
- const contentInfo = {
- size: stat.size,
- valid: true
+ }
+ return ret
}
- return ssri.checkStream(
- fs.createReadStream(filepath),
- sri
- ).catch(err => {
- if (err.code !== 'EINTEGRITY') { throw err }
- return rimraf(filepath).then(() => {
- contentInfo.valid = false
- })
- }).then(() => contentInfo)
- }).catch({ code: 'ENOENT' }, () => ({ size: 0, valid: false }))
-}
+ }
-function rebuildIndex (cache, opts) {
- opts.log.silly('verify', 'rebuilding index')
- return index.ls(cache).then(entries => {
- const stats = {
- missingContent: 0,
- rejectedEntries: 0,
- totalEntries: 0
- }
- const buckets = {}
- for (let k in entries) {
- if (entries.hasOwnProperty(k)) {
- const hashed = index._hashKey(k)
- const entry = entries[k]
- const excluded = opts.filter && !opts.filter(entry)
- excluded && stats.rejectedEntries++
- if (buckets[hashed] && !excluded) {
- buckets[hashed].push(entry)
- } else if (buckets[hashed] && excluded) {
- // skip
- } else if (excluded) {
- buckets[hashed] = []
- buckets[hashed]._path = index._bucketPath(cache, k)
- } else {
- buckets[hashed] = [entry]
- buckets[hashed]._path = index._bucketPath(cache, k)
+ function patchLutimes (fs) {
+ if (constants.hasOwnProperty("O_SYMLINK")) {
+ fs.lutimes = function (path, at, mt, cb) {
+ fs.open(path, constants.O_SYMLINK, function (er, fd) {
+ if (er) {
+ if (cb) cb(er)
+ return
+ }
+ fs.futimes(fd, at, mt, function (er) {
+ fs.close(fd, function (er2) {
+ if (cb) cb(er || er2)
+ })
+ })
+ })
+ }
+
+ fs.lutimesSync = function (path, at, mt) {
+ var fd = fs.openSync(path, constants.O_SYMLINK)
+ var ret
+ var threw = true
+ try {
+ ret = fs.futimesSync(fd, at, mt)
+ threw = false
+ } finally {
+ if (threw) {
+ try {
+ fs.closeSync(fd)
+ } catch (er) {}
+ } else {
+ fs.closeSync(fd)
+ }
}
+ return ret
}
+
+ } else {
+ fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) }
+ fs.lutimesSync = function () {}
}
- return BB.map(Object.keys(buckets), key => {
- return rebuildBucket(cache, buckets[key], stats, opts)
- }, { concurrency: opts.concurrency }).then(() => stats)
- })
-}
+ }
-function rebuildBucket (cache, bucket, stats, opts) {
- return fs.truncateAsync(bucket._path).then(() => {
- // This needs to be serialized because cacache explicitly
- // lets very racy bucket conflicts clobber each other.
- return BB.mapSeries(bucket, entry => {
- const content = contentPath(cache, entry.integrity)
- return fs.statAsync(content).then(() => {
- return index.insert(cache, entry.key, entry.integrity, {
- metadata: entry.metadata,
- size: entry.size
- }).then(() => { stats.totalEntries++ })
- }).catch({ code: 'ENOENT' }, () => {
- stats.rejectedEntries++
- stats.missingContent++
+ function chmodFix (orig) {
+ if (!orig) return orig
+ return function (target, mode, cb) {
+ return orig.call(fs, target, mode, function (er) {
+ if (chownErOk(er)) er = null
+ if (cb) cb.apply(this, arguments)
})
- })
- })
-}
-
-function cleanTmp (cache, opts) {
- opts.log.silly('verify', 'cleaning tmp directory')
- return rimraf(path.join(cache, 'tmp'))
-}
+ }
+ }
-function writeVerifile (cache, opts) {
- const verifile = path.join(cache, '_lastverified')
- opts.log.silly('verify', 'writing verifile to ' + verifile)
- try {
- return fs.writeFileAsync(verifile, '' + (+(new Date())))
- } finally {
- fixOwner.chownr.sync(cache, verifile)
+ function chmodFixSync (orig) {
+ if (!orig) return orig
+ return function (target, mode) {
+ try {
+ return orig.call(fs, target, mode)
+ } catch (er) {
+ if (!chownErOk(er)) throw er
+ }
+ }
}
-}
-module.exports.lastRun = lastRun
-function lastRun (cache) {
- return fs.readFileAsync(
- path.join(cache, '_lastverified'), 'utf8'
- ).then(data => new Date(+data))
-}
+ function chownFix (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid, cb) {
+ return orig.call(fs, target, uid, gid, function (er) {
+ if (chownErOk(er)) er = null
+ if (cb) cb.apply(this, arguments)
+ })
+ }
+ }
-/***/ }),
-/* 306 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ function chownFixSync (orig) {
+ if (!orig) return orig
+ return function (target, uid, gid) {
+ try {
+ return orig.call(fs, target, uid, gid)
+ } catch (er) {
+ if (!chownErOk(er)) throw er
+ }
+ }
+ }
-var concatMap = __webpack_require__(896);
-var balanced = __webpack_require__(621);
+ function statFix (orig) {
+ if (!orig) return orig
+ // Older versions of Node erroneously returned signed integers for
+ // uid + gid.
+ return function (target, options, cb) {
+ if (typeof options === 'function') {
+ cb = options
+ options = null
+ }
+ function callback (er, stats) {
+ if (stats) {
+ if (stats.uid < 0) stats.uid += 0x100000000
+ if (stats.gid < 0) stats.gid += 0x100000000
+ }
+ if (cb) cb.apply(this, arguments)
+ }
+ return options ? orig.call(fs, target, options, callback)
+ : orig.call(fs, target, callback)
+ }
+ }
-module.exports = expandTop;
+ function statFixSync (orig) {
+ if (!orig) return orig
+ // Older versions of Node erroneously returned signed integers for
+ // uid + gid.
+ return function (target, options) {
+ var stats = options ? orig.call(fs, target, options)
+ : orig.call(fs, target)
+ if (stats.uid < 0) stats.uid += 0x100000000
+ if (stats.gid < 0) stats.gid += 0x100000000
+ return stats;
+ }
+ }
-var escSlash = '\0SLASH'+Math.random()+'\0';
-var escOpen = '\0OPEN'+Math.random()+'\0';
-var escClose = '\0CLOSE'+Math.random()+'\0';
-var escComma = '\0COMMA'+Math.random()+'\0';
-var escPeriod = '\0PERIOD'+Math.random()+'\0';
+ // ENOSYS means that the fs doesn't support the op. Just ignore
+ // that, because it doesn't matter.
+ //
+ // if there's no getuid, or if getuid() is something other
+ // than 0, and the error is EINVAL or EPERM, then just ignore
+ // it.
+ //
+ // This specific case is a silent failure in cp, install, tar,
+ // and most other unix tools that manage permissions.
+ //
+ // When running as root, or if other types of errors are
+ // encountered, then it's strict.
+ function chownErOk (er) {
+ if (!er)
+ return true
-function numeric(str) {
- return parseInt(str, 10) == str
- ? parseInt(str, 10)
- : str.charCodeAt(0);
-}
+ if (er.code === "ENOSYS")
+ return true
-function escapeBraces(str) {
- return str.split('\\\\').join(escSlash)
- .split('\\{').join(escOpen)
- .split('\\}').join(escClose)
- .split('\\,').join(escComma)
- .split('\\.').join(escPeriod);
-}
+ var nonroot = !process.getuid || process.getuid() !== 0
+ if (nonroot) {
+ if (er.code === "EINVAL" || er.code === "EPERM")
+ return true
+ }
-function unescapeBraces(str) {
- return str.split(escSlash).join('\\')
- .split(escOpen).join('{')
- .split(escClose).join('}')
- .split(escComma).join(',')
- .split(escPeriod).join('.');
+ return false
+ }
}
-// Basically just str.split(","), but handling cases
-// where we have nested braced sections, which should be
-// treated as individual members, like {a,{b,c},d}
-function parseCommaParts(str) {
- if (!str)
- return [''];
-
- var parts = [];
- var m = balanced('{', '}', str);
+/***/ }),
+/* 251 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
- if (!m)
- return str.split(',');
+"use strict";
- var pre = m.pre;
- var body = m.body;
- var post = m.post;
- var p = pre.split(',');
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __importStar(__webpack_require__(470));
+const http_client_1 = __webpack_require__(22);
+const storage_blob_1 = __webpack_require__(373);
+const buffer = __importStar(__webpack_require__(293));
+const fs = __importStar(__webpack_require__(747));
+const stream = __importStar(__webpack_require__(794));
+const util = __importStar(__webpack_require__(669));
+const utils = __importStar(__webpack_require__(15));
+const constants_1 = __webpack_require__(931);
+const requestUtils_1 = __webpack_require__(899);
+/**
+ * Pipes the body of a HTTP response to a stream
+ *
+ * @param response the HTTP response
+ * @param output the writable stream
+ */
+function pipeResponseToStream(response, output) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const pipeline = util.promisify(stream.pipeline);
+ yield pipeline(response.message, output);
+ });
+}
+/**
+ * Class for tracking the download state and displaying stats.
+ */
+class DownloadProgress {
+ constructor(contentLength) {
+ this.contentLength = contentLength;
+ this.segmentIndex = 0;
+ this.segmentSize = 0;
+ this.segmentOffset = 0;
+ this.receivedBytes = 0;
+ this.displayedComplete = false;
+ this.startTime = Date.now();
+ }
+ /**
+ * Progress to the next segment. Only call this method when the previous segment
+ * is complete.
+ *
+ * @param segmentSize the length of the next segment
+ */
+ nextSegment(segmentSize) {
+ this.segmentOffset = this.segmentOffset + this.segmentSize;
+ this.segmentIndex = this.segmentIndex + 1;
+ this.segmentSize = segmentSize;
+ this.receivedBytes = 0;
+ core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`);
+ }
+ /**
+ * Sets the number of bytes received for the current segment.
+ *
+ * @param receivedBytes the number of bytes received
+ */
+ setReceivedBytes(receivedBytes) {
+ this.receivedBytes = receivedBytes;
+ }
+ /**
+ * Returns the total number of bytes transferred.
+ */
+ getTransferredBytes() {
+ return this.segmentOffset + this.receivedBytes;
+ }
+ /**
+ * Returns true if the download is complete.
+ */
+ isDone() {
+ return this.getTransferredBytes() === this.contentLength;
+ }
+ /**
+ * Prints the current download stats. Once the download completes, this will print one
+ * last line and then stop.
+ */
+ display() {
+ if (this.displayedComplete) {
+ return;
+ }
+ const transferredBytes = this.segmentOffset + this.receivedBytes;
+ const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1);
+ const elapsedTime = Date.now() - this.startTime;
+ const downloadSpeed = (transferredBytes /
+ (1024 * 1024) /
+ (elapsedTime / 1000)).toFixed(1);
+ core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`);
+ if (this.isDone()) {
+ this.displayedComplete = true;
+ }
+ }
+ /**
+ * Returns a function used to handle TransferProgressEvents.
+ */
+ onProgress() {
+ return (progress) => {
+ this.setReceivedBytes(progress.loadedBytes);
+ };
+ }
+ /**
+ * Starts the timer that displays the stats.
+ *
+ * @param delayInMs the delay between each write
+ */
+ startDisplayTimer(delayInMs = 1000) {
+ const displayCallback = () => {
+ this.display();
+ if (!this.isDone()) {
+ this.timeoutHandle = setTimeout(displayCallback, delayInMs);
+ }
+ };
+ this.timeoutHandle = setTimeout(displayCallback, delayInMs);
+ }
+ /**
+ * Stops the timer that displays the stats. As this typically indicates the download
+ * is complete, this will display one last line, unless the last line has already
+ * been written.
+ */
+ stopDisplayTimer() {
+ if (this.timeoutHandle) {
+ clearTimeout(this.timeoutHandle);
+ this.timeoutHandle = undefined;
+ }
+ this.display();
+ }
+}
+exports.DownloadProgress = DownloadProgress;
+/**
+ * Download the cache using the Actions toolkit http-client
+ *
+ * @param archiveLocation the URL for the cache
+ * @param archivePath the local path where the cache is saved
+ */
+function downloadCacheHttpClient(archiveLocation, archivePath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const writeStream = fs.createWriteStream(archivePath);
+ const httpClient = new http_client_1.HttpClient('actions/cache');
+ const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));
+ // Abort download if no traffic received over the socket.
+ downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
+ downloadResponse.message.destroy();
+ core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
+ });
+ yield pipeResponseToStream(downloadResponse, writeStream);
+ // Validate download size.
+ const contentLengthHeader = downloadResponse.message.headers['content-length'];
+ if (contentLengthHeader) {
+ const expectedLength = parseInt(contentLengthHeader);
+ const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
+ if (actualLength !== expectedLength) {
+ throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
+ }
+ }
+ else {
+ core.debug('Unable to validate download, no Content-Length header');
+ }
+ });
+}
+exports.downloadCacheHttpClient = downloadCacheHttpClient;
+/**
+ * Download the cache using the Azure Storage SDK. Only call this method if the
+ * URL points to an Azure Storage endpoint.
+ *
+ * @param archiveLocation the URL for the cache
+ * @param archivePath the local path where the cache is saved
+ * @param options the download options with the defaults set
+ */
+function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
+ var _a;
+ return __awaiter(this, void 0, void 0, function* () {
+ const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, {
+ retryOptions: {
+ // Override the timeout used when downloading each 4 MB chunk
+ // The default is 2 min / MB, which is way too slow
+ tryTimeoutInMs: options.timeoutInMs
+ }
+ });
+ const properties = yield client.getProperties();
+ const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1;
+ if (contentLength < 0) {
+ // We should never hit this condition, but just in case fall back to downloading the
+ // file as one large stream
+ core.debug('Unable to determine content length, downloading file with http-client...');
+ yield downloadCacheHttpClient(archiveLocation, archivePath);
+ }
+ else {
+ // Use downloadToBuffer for faster downloads, since internally it splits the
+ // file into 4 MB chunks which can then be parallelized and retried independently
+ //
+ // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB
+ // on 64-bit systems), split the download into multiple segments
+ const maxSegmentSize = buffer.constants.MAX_LENGTH;
+ const downloadProgress = new DownloadProgress(contentLength);
+ const fd = fs.openSync(archivePath, 'w');
+ try {
+ downloadProgress.startDisplayTimer();
+ while (!downloadProgress.isDone()) {
+ const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
+ const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
+ downloadProgress.nextSegment(segmentSize);
+ const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
+ concurrency: options.downloadConcurrency,
+ onProgress: downloadProgress.onProgress()
+ });
+ fs.writeFileSync(fd, result);
+ }
+ }
+ finally {
+ downloadProgress.stopDisplayTimer();
+ fs.closeSync(fd);
+ }
+ }
+ });
+}
+exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
+//# sourceMappingURL=downloadUtils.js.map
- p[p.length-1] += '{' + body + '}';
- var postParts = parseCommaParts(post);
- if (post.length) {
- p[p.length-1] += postParts.shift();
- p.push.apply(p, postParts);
- }
+/***/ }),
+/* 252 */,
+/* 253 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- parts.push.apply(parts, p);
+"use strict";
- return parts;
+module.exports = function(NEXT_FILTER) {
+var util = __webpack_require__(248);
+var getKeys = __webpack_require__(883).keys;
+var tryCatch = util.tryCatch;
+var errorObj = util.errorObj;
+
+function catchFilter(instances, cb, promise) {
+ return function(e) {
+ var boundTo = promise._boundValue();
+ predicateLoop: for (var i = 0; i < instances.length; ++i) {
+ var item = instances[i];
+
+ if (item === Error ||
+ (item != null && item.prototype instanceof Error)) {
+ if (e instanceof item) {
+ return tryCatch(cb).call(boundTo, e);
+ }
+ } else if (typeof item === "function") {
+ var matchesPredicate = tryCatch(item).call(boundTo, e);
+ if (matchesPredicate === errorObj) {
+ return matchesPredicate;
+ } else if (matchesPredicate) {
+ return tryCatch(cb).call(boundTo, e);
+ }
+ } else if (util.isObject(e)) {
+ var keys = getKeys(item);
+ for (var j = 0; j < keys.length; ++j) {
+ var key = keys[j];
+ if (item[key] != e[key]) {
+ continue predicateLoop;
+ }
+ }
+ return tryCatch(cb).call(boundTo, e);
+ }
+ }
+ return NEXT_FILTER;
+ };
}
-function expandTop(str) {
- if (!str)
- return [];
+return catchFilter;
+};
- // I don't know why Bash 4.3 does this, but it does.
- // Anything starting with {} will have the first two bytes preserved
- // but *only* at the top level, so {},a}b will not expand to anything,
- // but a{},b}c will be expanded to [a}c,abc].
- // One could argue that this is a bug in Bash, but since the goal of
- // this module is to match Bash's rules, we escape a leading {}
- if (str.substr(0, 2) === '{}') {
- str = '\\{\\}' + str.substr(2);
- }
- return expand(escapeBraces(str), true).map(unescapeBraces);
-}
+/***/ }),
+/* 254 */
+/***/ (function(module, exports, __webpack_require__) {
-function identity(e) {
- return e;
-}
+/* eslint-disable node/no-deprecated-api */
+var buffer = __webpack_require__(293)
+var Buffer = buffer.Buffer
-function embrace(str) {
- return '{' + str + '}';
+// alternative to using Object.keys for old browsers
+function copyProps (src, dst) {
+ for (var key in src) {
+ dst[key] = src[key]
+ }
}
-function isPadded(el) {
- return /^-?0\d/.test(el);
+if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) {
+ module.exports = buffer
+} else {
+ // Copy properties from require('buffer')
+ copyProps(buffer, exports)
+ exports.Buffer = SafeBuffer
}
-function lte(i, y) {
- return i <= y;
-}
-function gte(i, y) {
- return i >= y;
+function SafeBuffer (arg, encodingOrOffset, length) {
+ return Buffer(arg, encodingOrOffset, length)
}
-function expand(str, isTop) {
- var expansions = [];
-
- var m = balanced('{', '}', str);
- if (!m || /\$$/.test(m.pre)) return [str];
+// Copy static methods from Buffer
+copyProps(Buffer, SafeBuffer)
- var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
- var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
- var isSequence = isNumericSequence || isAlphaSequence;
- var isOptions = m.body.indexOf(',') >= 0;
- if (!isSequence && !isOptions) {
- // {a},b}
- if (m.post.match(/,.*\}/)) {
- str = m.pre + '{' + m.body + escClose + m.post;
- return expand(str);
- }
- return [str];
+SafeBuffer.from = function (arg, encodingOrOffset, length) {
+ if (typeof arg === 'number') {
+ throw new TypeError('Argument must not be a number')
}
+ return Buffer(arg, encodingOrOffset, length)
+}
- var n;
- if (isSequence) {
- n = m.body.split(/\.\./);
- } else {
- n = parseCommaParts(m.body);
- if (n.length === 1) {
- // x{{a,b}}y ==> x{a}y x{b}y
- n = expand(n[0], false).map(embrace);
- if (n.length === 1) {
- var post = m.post.length
- ? expand(m.post, false)
- : [''];
- return post.map(function(p) {
- return m.pre + n[0] + p;
- });
- }
- }
+SafeBuffer.alloc = function (size, fill, encoding) {
+ if (typeof size !== 'number') {
+ throw new TypeError('Argument must be a number')
}
-
- // at this point, n is the parts, and we know it's not a comma set
- // with a single entry.
-
- // no need to expand pre, since it is guaranteed to be free of brace-sets
- var pre = m.pre;
- var post = m.post.length
- ? expand(m.post, false)
- : [''];
-
- var N;
-
- if (isSequence) {
- var x = numeric(n[0]);
- var y = numeric(n[1]);
- var width = Math.max(n[0].length, n[1].length)
- var incr = n.length == 3
- ? Math.abs(numeric(n[2]))
- : 1;
- var test = lte;
- var reverse = y < x;
- if (reverse) {
- incr *= -1;
- test = gte;
- }
- var pad = n.some(isPadded);
-
- N = [];
-
- for (var i = x; test(i, y); i += incr) {
- var c;
- if (isAlphaSequence) {
- c = String.fromCharCode(i);
- if (c === '\\')
- c = '';
- } else {
- c = String(i);
- if (pad) {
- var need = width - c.length;
- if (need > 0) {
- var z = new Array(need + 1).join('0');
- if (i < 0)
- c = '-' + z + c.slice(1);
- else
- c = z + c;
- }
- }
- }
- N.push(c);
+ var buf = Buffer(size)
+ if (fill !== undefined) {
+ if (typeof encoding === 'string') {
+ buf.fill(fill, encoding)
+ } else {
+ buf.fill(fill)
}
} else {
- N = concatMap(n, function(el) { return expand(el, false) });
+ buf.fill(0)
}
+ return buf
+}
- for (var j = 0; j < N.length; j++) {
- for (var k = 0; k < post.length; k++) {
- var expansion = pre + N[j] + post[k];
- if (!isTop || isSequence || expansion)
- expansions.push(expansion);
- }
+SafeBuffer.allocUnsafe = function (size) {
+ if (typeof size !== 'number') {
+ throw new TypeError('Argument must be a number')
}
-
- return expansions;
+ return Buffer(size)
}
+SafeBuffer.allocUnsafeSlow = function (size) {
+ if (typeof size !== 'number') {
+ throw new TypeError('Argument must be a number')
+ }
+ return buffer.SlowBuffer(size)
+}
/***/ }),
-/* 307 */,
-/* 308 */,
-/* 309 */,
-/* 310 */
+/* 255 */,
+/* 256 */,
+/* 257 */
/***/ (function(module, __unusedexports, __webpack_require__) {
-/**
- * Module dependencies.
- */
+// Generated by CoffeeScript 1.12.7
+(function() {
+ var DocumentPosition, NodeType, XMLCData, XMLComment, XMLDeclaration, XMLDocType, XMLDummy, XMLElement, XMLNamedNodeMap, XMLNode, XMLNodeList, XMLProcessingInstruction, XMLRaw, XMLText, getValue, isEmpty, isFunction, isObject, ref1,
+ hasProp = {}.hasOwnProperty;
-var tls; // lazy-loaded...
-var url = __webpack_require__(835);
-var dns = __webpack_require__(819);
-var Agent = __webpack_require__(234);
-var SocksClient = __webpack_require__(198).SocksClient;
-var inherits = __webpack_require__(669).inherits;
+ ref1 = __webpack_require__(582), isObject = ref1.isObject, isFunction = ref1.isFunction, isEmpty = ref1.isEmpty, getValue = ref1.getValue;
-/**
- * Module exports.
- */
+ XMLElement = null;
-module.exports = SocksProxyAgent;
+ XMLCData = null;
-/**
- * The `SocksProxyAgent`.
- *
- * @api public
- */
+ XMLComment = null;
-function SocksProxyAgent(opts) {
- if (!(this instanceof SocksProxyAgent)) return new SocksProxyAgent(opts);
- if ('string' == typeof opts) opts = url.parse(opts);
- if (!opts)
- throw new Error(
- 'a SOCKS proxy server `host` and `port` must be specified!'
- );
- Agent.call(this, opts);
+ XMLDeclaration = null;
- var proxy = Object.assign({}, opts);
+ XMLDocType = null;
- // prefer `hostname` over `host`, because of `url.parse()`
- proxy.host = proxy.hostname || proxy.host;
+ XMLRaw = null;
- // SOCKS doesn't *technically* have a default port, but this is
- // the same default that `curl(1)` uses
- proxy.port = +proxy.port || 1080;
+ XMLText = null;
- if (proxy.host && proxy.path) {
- // if both a `host` and `path` are specified then it's most likely the
- // result of a `url.parse()` call... we need to remove the `path` portion so
- // that `net.connect()` doesn't attempt to open that as a unix socket file.
- delete proxy.path;
- delete proxy.pathname;
- }
+ XMLProcessingInstruction = null;
- // figure out if we want socks v4 or v5, based on the "protocol" used.
- // Defaults to 5.
- proxy.lookup = false;
- switch (proxy.protocol) {
- case 'socks4:':
- proxy.lookup = true;
- // pass through
- case 'socks4a:':
- proxy.version = 4;
- break;
- case 'socks5:':
- proxy.lookup = true;
- // pass through
- case 'socks:': // no version specified, default to 5h
- case 'socks5h:':
- proxy.version = 5;
- break;
- default:
- throw new TypeError(
- 'A "socks" protocol must be specified! Got: ' + proxy.protocol
- );
- }
-
- if (proxy.auth) {
- var auth = proxy.auth.split(':');
- proxy.authentication = { username: auth[0], password: auth[1] };
- proxy.userid = auth[0];
- }
- this.proxy = proxy;
-}
-inherits(SocksProxyAgent, Agent);
+ XMLDummy = null;
-/**
- * Initiates a SOCKS connection to the specified SOCKS proxy server,
- * which in turn connects to the specified remote host and port.
- *
- * @api public
- */
+ NodeType = null;
-SocksProxyAgent.prototype.callback = function connect(req, opts, fn) {
- var proxy = this.proxy;
+ XMLNodeList = null;
- // called once the SOCKS proxy has connected to the specified remote endpoint
- function onhostconnect(err, result) {
- if (err) return fn(err);
+ XMLNamedNodeMap = null;
- var socket = result.socket;
+ DocumentPosition = null;
- var s = socket;
- if (opts.secureEndpoint) {
- // since the proxy is connecting to an SSL server, we have
- // to upgrade this socket connection to an SSL connection
- if (!tls) tls = __webpack_require__(16);
- opts.socket = socket;
- opts.servername = opts.host;
- opts.host = null;
- opts.hostname = null;
- opts.port = null;
- s = tls.connect(opts);
+ module.exports = XMLNode = (function() {
+ function XMLNode(parent1) {
+ this.parent = parent1;
+ if (this.parent) {
+ this.options = this.parent.options;
+ this.stringify = this.parent.stringify;
+ }
+ this.value = null;
+ this.children = [];
+ this.baseURI = null;
+ if (!XMLElement) {
+ XMLElement = __webpack_require__(796);
+ XMLCData = __webpack_require__(657);
+ XMLComment = __webpack_require__(919);
+ XMLDeclaration = __webpack_require__(738);
+ XMLDocType = __webpack_require__(735);
+ XMLRaw = __webpack_require__(660);
+ XMLText = __webpack_require__(708);
+ XMLProcessingInstruction = __webpack_require__(491);
+ XMLDummy = __webpack_require__(956);
+ NodeType = __webpack_require__(683);
+ XMLNodeList = __webpack_require__(265);
+ XMLNamedNodeMap = __webpack_require__(451);
+ DocumentPosition = __webpack_require__(65);
+ }
}
- fn(null, s);
- }
-
- // called for the `dns.lookup()` callback
- function onlookup(err, ip) {
- if (err) return fn(err);
- options.destination.host = ip;
- SocksClient.createConnection(options, onhostconnect);
- }
-
- var options = {
- proxy: {
- ipaddress: proxy.host,
- port: +proxy.port,
- type: proxy.version
- },
- destination: {
- port: +opts.port
- },
- command: 'connect'
- };
-
- if (proxy.authentication) {
- options.proxy.userId = proxy.userid;
- options.proxy.password = proxy.authentication.password;
- }
-
- if (proxy.lookup) {
- // client-side DNS resolution for "4" and "5" socks proxy versions
- dns.lookup(opts.host, onlookup);
- } else {
- // proxy hostname DNS resolution for "4a" and "5h" socks proxy servers
- onlookup(null, opts.host);
- }
-}
-
-
-/***/ }),
-/* 311 */
-/***/ (function(module, exports, __webpack_require__) {
-
-"use strict";
+ Object.defineProperty(XMLNode.prototype, 'nodeName', {
+ get: function() {
+ return this.name;
+ }
+ });
+ Object.defineProperty(XMLNode.prototype, 'nodeType', {
+ get: function() {
+ return this.type;
+ }
+ });
-const rm = __webpack_require__(974)
-const link = __webpack_require__(273)
-const mkdir = __webpack_require__(836)
-const binLink = __webpack_require__(834)
+ Object.defineProperty(XMLNode.prototype, 'nodeValue', {
+ get: function() {
+ return this.value;
+ }
+ });
-exports = module.exports = {
- rm: rm,
- link: link.link,
- linkIfExists: link.linkIfExists,
- mkdir: mkdir,
- binLink: binLink
-}
+ Object.defineProperty(XMLNode.prototype, 'parentNode', {
+ get: function() {
+ return this.parent;
+ }
+ });
+ Object.defineProperty(XMLNode.prototype, 'childNodes', {
+ get: function() {
+ if (!this.childNodeList || !this.childNodeList.nodes) {
+ this.childNodeList = new XMLNodeList(this.children);
+ }
+ return this.childNodeList;
+ }
+ });
-/***/ }),
-/* 312 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ Object.defineProperty(XMLNode.prototype, 'firstChild', {
+ get: function() {
+ return this.children[0] || null;
+ }
+ });
-// Generated by CoffeeScript 1.12.7
-(function() {
- var NodeType, WriterState, XMLDOMImplementation, XMLDocument, XMLDocumentCB, XMLStreamWriter, XMLStringWriter, assign, isFunction, ref;
+ Object.defineProperty(XMLNode.prototype, 'lastChild', {
+ get: function() {
+ return this.children[this.children.length - 1] || null;
+ }
+ });
- ref = __webpack_require__(582), assign = ref.assign, isFunction = ref.isFunction;
+ Object.defineProperty(XMLNode.prototype, 'previousSibling', {
+ get: function() {
+ var i;
+ i = this.parent.children.indexOf(this);
+ return this.parent.children[i - 1] || null;
+ }
+ });
- XMLDOMImplementation = __webpack_require__(515);
+ Object.defineProperty(XMLNode.prototype, 'nextSibling', {
+ get: function() {
+ var i;
+ i = this.parent.children.indexOf(this);
+ return this.parent.children[i + 1] || null;
+ }
+ });
- XMLDocument = __webpack_require__(559);
+ Object.defineProperty(XMLNode.prototype, 'ownerDocument', {
+ get: function() {
+ return this.document() || null;
+ }
+ });
- XMLDocumentCB = __webpack_require__(768);
+ Object.defineProperty(XMLNode.prototype, 'textContent', {
+ get: function() {
+ var child, j, len, ref2, str;
+ if (this.nodeType === NodeType.Element || this.nodeType === NodeType.DocumentFragment) {
+ str = '';
+ ref2 = this.children;
+ for (j = 0, len = ref2.length; j < len; j++) {
+ child = ref2[j];
+ if (child.textContent) {
+ str += child.textContent;
+ }
+ }
+ return str;
+ } else {
+ return null;
+ }
+ },
+ set: function(value) {
+ throw new Error("This DOM method is not implemented." + this.debugInfo());
+ }
+ });
- XMLStringWriter = __webpack_require__(347);
+ XMLNode.prototype.setParent = function(parent) {
+ var child, j, len, ref2, results;
+ this.parent = parent;
+ if (parent) {
+ this.options = parent.options;
+ this.stringify = parent.stringify;
+ }
+ ref2 = this.children;
+ results = [];
+ for (j = 0, len = ref2.length; j < len; j++) {
+ child = ref2[j];
+ results.push(child.setParent(this));
+ }
+ return results;
+ };
- XMLStreamWriter = __webpack_require__(458);
+ XMLNode.prototype.element = function(name, attributes, text) {
+ var childNode, item, j, k, key, lastChild, len, len1, ref2, ref3, val;
+ lastChild = null;
+ if (attributes === null && (text == null)) {
+ ref2 = [{}, null], attributes = ref2[0], text = ref2[1];
+ }
+ if (attributes == null) {
+ attributes = {};
+ }
+ attributes = getValue(attributes);
+ if (!isObject(attributes)) {
+ ref3 = [attributes, text], text = ref3[0], attributes = ref3[1];
+ }
+ if (name != null) {
+ name = getValue(name);
+ }
+ if (Array.isArray(name)) {
+ for (j = 0, len = name.length; j < len; j++) {
+ item = name[j];
+ lastChild = this.element(item);
+ }
+ } else if (isFunction(name)) {
+ lastChild = this.element(name.apply());
+ } else if (isObject(name)) {
+ for (key in name) {
+ if (!hasProp.call(name, key)) continue;
+ val = name[key];
+ if (isFunction(val)) {
+ val = val.apply();
+ }
+ if (!this.options.ignoreDecorators && this.stringify.convertAttKey && key.indexOf(this.stringify.convertAttKey) === 0) {
+ lastChild = this.attribute(key.substr(this.stringify.convertAttKey.length), val);
+ } else if (!this.options.separateArrayItems && Array.isArray(val) && isEmpty(val)) {
+ lastChild = this.dummy();
+ } else if (isObject(val) && isEmpty(val)) {
+ lastChild = this.element(key);
+ } else if (!this.options.keepNullNodes && (val == null)) {
+ lastChild = this.dummy();
+ } else if (!this.options.separateArrayItems && Array.isArray(val)) {
+ for (k = 0, len1 = val.length; k < len1; k++) {
+ item = val[k];
+ childNode = {};
+ childNode[key] = item;
+ lastChild = this.element(childNode);
+ }
+ } else if (isObject(val)) {
+ if (!this.options.ignoreDecorators && this.stringify.convertTextKey && key.indexOf(this.stringify.convertTextKey) === 0) {
+ lastChild = this.element(val);
+ } else {
+ lastChild = this.element(key);
+ lastChild.element(val);
+ }
+ } else {
+ lastChild = this.element(key, val);
+ }
+ }
+ } else if (!this.options.keepNullNodes && text === null) {
+ lastChild = this.dummy();
+ } else {
+ if (!this.options.ignoreDecorators && this.stringify.convertTextKey && name.indexOf(this.stringify.convertTextKey) === 0) {
+ lastChild = this.text(text);
+ } else if (!this.options.ignoreDecorators && this.stringify.convertCDataKey && name.indexOf(this.stringify.convertCDataKey) === 0) {
+ lastChild = this.cdata(text);
+ } else if (!this.options.ignoreDecorators && this.stringify.convertCommentKey && name.indexOf(this.stringify.convertCommentKey) === 0) {
+ lastChild = this.comment(text);
+ } else if (!this.options.ignoreDecorators && this.stringify.convertRawKey && name.indexOf(this.stringify.convertRawKey) === 0) {
+ lastChild = this.raw(text);
+ } else if (!this.options.ignoreDecorators && this.stringify.convertPIKey && name.indexOf(this.stringify.convertPIKey) === 0) {
+ lastChild = this.instruction(name.substr(this.stringify.convertPIKey.length), text);
+ } else {
+ lastChild = this.node(name, attributes, text);
+ }
+ }
+ if (lastChild == null) {
+ throw new Error("Could not create any elements with: " + name + ". " + this.debugInfo());
+ }
+ return lastChild;
+ };
- NodeType = __webpack_require__(683);
+ XMLNode.prototype.insertBefore = function(name, attributes, text) {
+ var child, i, newChild, refChild, removed;
+ if (name != null ? name.type : void 0) {
+ newChild = name;
+ refChild = attributes;
+ newChild.setParent(this);
+ if (refChild) {
+ i = children.indexOf(refChild);
+ removed = children.splice(i);
+ children.push(newChild);
+ Array.prototype.push.apply(children, removed);
+ } else {
+ children.push(newChild);
+ }
+ return newChild;
+ } else {
+ if (this.isRoot) {
+ throw new Error("Cannot insert elements at root level. " + this.debugInfo(name));
+ }
+ i = this.parent.children.indexOf(this);
+ removed = this.parent.children.splice(i);
+ child = this.parent.element(name, attributes, text);
+ Array.prototype.push.apply(this.parent.children, removed);
+ return child;
+ }
+ };
- WriterState = __webpack_require__(541);
+ XMLNode.prototype.insertAfter = function(name, attributes, text) {
+ var child, i, removed;
+ if (this.isRoot) {
+ throw new Error("Cannot insert elements at root level. " + this.debugInfo(name));
+ }
+ i = this.parent.children.indexOf(this);
+ removed = this.parent.children.splice(i + 1);
+ child = this.parent.element(name, attributes, text);
+ Array.prototype.push.apply(this.parent.children, removed);
+ return child;
+ };
- module.exports.create = function(name, xmldec, doctype, options) {
- var doc, root;
- if (name == null) {
- throw new Error("Root element needs a name.");
- }
- options = assign({}, xmldec, doctype, options);
- doc = new XMLDocument(options);
- root = doc.element(name);
- if (!options.headless) {
- doc.declaration(options);
- if ((options.pubID != null) || (options.sysID != null)) {
- doc.dtd(options);
+ XMLNode.prototype.remove = function() {
+ var i, ref2;
+ if (this.isRoot) {
+ throw new Error("Cannot remove the root element. " + this.debugInfo());
}
- }
- return root;
- };
+ i = this.parent.children.indexOf(this);
+ [].splice.apply(this.parent.children, [i, i - i + 1].concat(ref2 = [])), ref2;
+ return this.parent;
+ };
- module.exports.begin = function(options, onData, onEnd) {
- var ref1;
- if (isFunction(options)) {
- ref1 = [options, onData], onData = ref1[0], onEnd = ref1[1];
- options = {};
- }
- if (onData) {
- return new XMLDocumentCB(options, onData, onEnd);
- } else {
- return new XMLDocument(options);
- }
- };
+ XMLNode.prototype.node = function(name, attributes, text) {
+ var child, ref2;
+ if (name != null) {
+ name = getValue(name);
+ }
+ attributes || (attributes = {});
+ attributes = getValue(attributes);
+ if (!isObject(attributes)) {
+ ref2 = [attributes, text], text = ref2[0], attributes = ref2[1];
+ }
+ child = new XMLElement(this, name, attributes);
+ if (text != null) {
+ child.text(text);
+ }
+ this.children.push(child);
+ return child;
+ };
- module.exports.stringWriter = function(options) {
- return new XMLStringWriter(options);
- };
+ XMLNode.prototype.text = function(value) {
+ var child;
+ if (isObject(value)) {
+ this.element(value);
+ }
+ child = new XMLText(this, value);
+ this.children.push(child);
+ return this;
+ };
- module.exports.streamWriter = function(stream, options) {
- return new XMLStreamWriter(stream, options);
- };
+ XMLNode.prototype.cdata = function(value) {
+ var child;
+ child = new XMLCData(this, value);
+ this.children.push(child);
+ return this;
+ };
- module.exports.implementation = new XMLDOMImplementation();
+ XMLNode.prototype.comment = function(value) {
+ var child;
+ child = new XMLComment(this, value);
+ this.children.push(child);
+ return this;
+ };
- module.exports.nodeType = NodeType;
+ XMLNode.prototype.commentBefore = function(value) {
+ var child, i, removed;
+ i = this.parent.children.indexOf(this);
+ removed = this.parent.children.splice(i);
+ child = this.parent.comment(value);
+ Array.prototype.push.apply(this.parent.children, removed);
+ return this;
+ };
- module.exports.writerState = WriterState;
+ XMLNode.prototype.commentAfter = function(value) {
+ var child, i, removed;
+ i = this.parent.children.indexOf(this);
+ removed = this.parent.children.splice(i + 1);
+ child = this.parent.comment(value);
+ Array.prototype.push.apply(this.parent.children, removed);
+ return this;
+ };
-}).call(this);
+ XMLNode.prototype.raw = function(value) {
+ var child;
+ child = new XMLRaw(this, value);
+ this.children.push(child);
+ return this;
+ };
+ XMLNode.prototype.dummy = function() {
+ var child;
+ child = new XMLDummy(this);
+ return child;
+ };
-/***/ }),
-/* 313 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ XMLNode.prototype.instruction = function(target, value) {
+ var insTarget, insValue, instruction, j, len;
+ if (target != null) {
+ target = getValue(target);
+ }
+ if (value != null) {
+ value = getValue(value);
+ }
+ if (Array.isArray(target)) {
+ for (j = 0, len = target.length; j < len; j++) {
+ insTarget = target[j];
+ this.instruction(insTarget);
+ }
+ } else if (isObject(target)) {
+ for (insTarget in target) {
+ if (!hasProp.call(target, insTarget)) continue;
+ insValue = target[insTarget];
+ this.instruction(insTarget, insValue);
+ }
+ } else {
+ if (isFunction(value)) {
+ value = value.apply();
+ }
+ instruction = new XMLProcessingInstruction(this, target, value);
+ this.children.push(instruction);
+ }
+ return this;
+ };
-"use strict";
+ XMLNode.prototype.instructionBefore = function(target, value) {
+ var child, i, removed;
+ i = this.parent.children.indexOf(this);
+ removed = this.parent.children.splice(i);
+ child = this.parent.instruction(target, value);
+ Array.prototype.push.apply(this.parent.children, removed);
+ return this;
+ };
+ XMLNode.prototype.instructionAfter = function(target, value) {
+ var child, i, removed;
+ i = this.parent.children.indexOf(this);
+ removed = this.parent.children.splice(i + 1);
+ child = this.parent.instruction(target, value);
+ Array.prototype.push.apply(this.parent.children, removed);
+ return this;
+ };
-var Buffer = __webpack_require__(215).Buffer;
-
-// NOTE: Due to 'stream' module being pretty large (~100Kb, significant in browser environments),
-// we opt to dependency-inject it instead of creating a hard dependency.
-module.exports = function(stream_module) {
- var Transform = stream_module.Transform;
-
- // == Encoder stream =======================================================
-
- function IconvLiteEncoderStream(conv, options) {
- this.conv = conv;
- options = options || {};
- options.decodeStrings = false; // We accept only strings, so we don't need to decode them.
- Transform.call(this, options);
- }
-
- IconvLiteEncoderStream.prototype = Object.create(Transform.prototype, {
- constructor: { value: IconvLiteEncoderStream }
- });
+ XMLNode.prototype.declaration = function(version, encoding, standalone) {
+ var doc, xmldec;
+ doc = this.document();
+ xmldec = new XMLDeclaration(doc, version, encoding, standalone);
+ if (doc.children.length === 0) {
+ doc.children.unshift(xmldec);
+ } else if (doc.children[0].type === NodeType.Declaration) {
+ doc.children[0] = xmldec;
+ } else {
+ doc.children.unshift(xmldec);
+ }
+ return doc.root() || doc;
+ };
- IconvLiteEncoderStream.prototype._transform = function(chunk, encoding, done) {
- if (typeof chunk != 'string')
- return done(new Error("Iconv encoding stream needs strings as its input."));
- try {
- var res = this.conv.write(chunk);
- if (res && res.length) this.push(res);
- done();
+ XMLNode.prototype.dtd = function(pubID, sysID) {
+ var child, doc, doctype, i, j, k, len, len1, ref2, ref3;
+ doc = this.document();
+ doctype = new XMLDocType(doc, pubID, sysID);
+ ref2 = doc.children;
+ for (i = j = 0, len = ref2.length; j < len; i = ++j) {
+ child = ref2[i];
+ if (child.type === NodeType.DocType) {
+ doc.children[i] = doctype;
+ return doctype;
}
- catch (e) {
- done(e);
+ }
+ ref3 = doc.children;
+ for (i = k = 0, len1 = ref3.length; k < len1; i = ++k) {
+ child = ref3[i];
+ if (child.isRoot) {
+ doc.children.splice(i, 0, doctype);
+ return doctype;
}
- }
+ }
+ doc.children.push(doctype);
+ return doctype;
+ };
- IconvLiteEncoderStream.prototype._flush = function(done) {
- try {
- var res = this.conv.end();
- if (res && res.length) this.push(res);
- done();
+ XMLNode.prototype.up = function() {
+ if (this.isRoot) {
+ throw new Error("The root node has no parent. Use doc() if you need to get the document object.");
+ }
+ return this.parent;
+ };
+
+ XMLNode.prototype.root = function() {
+ var node;
+ node = this;
+ while (node) {
+ if (node.type === NodeType.Document) {
+ return node.rootObject;
+ } else if (node.isRoot) {
+ return node;
+ } else {
+ node = node.parent;
}
- catch (e) {
- done(e);
+ }
+ };
+
+ XMLNode.prototype.document = function() {
+ var node;
+ node = this;
+ while (node) {
+ if (node.type === NodeType.Document) {
+ return node;
+ } else {
+ node = node.parent;
}
- }
+ }
+ };
- IconvLiteEncoderStream.prototype.collect = function(cb) {
- var chunks = [];
- this.on('error', cb);
- this.on('data', function(chunk) { chunks.push(chunk); });
- this.on('end', function() {
- cb(null, Buffer.concat(chunks));
- });
- return this;
- }
+ XMLNode.prototype.end = function(options) {
+ return this.document().end(options);
+ };
+ XMLNode.prototype.prev = function() {
+ var i;
+ i = this.parent.children.indexOf(this);
+ if (i < 1) {
+ throw new Error("Already at the first node. " + this.debugInfo());
+ }
+ return this.parent.children[i - 1];
+ };
- // == Decoder stream =======================================================
+ XMLNode.prototype.next = function() {
+ var i;
+ i = this.parent.children.indexOf(this);
+ if (i === -1 || i === this.parent.children.length - 1) {
+ throw new Error("Already at the last node. " + this.debugInfo());
+ }
+ return this.parent.children[i + 1];
+ };
- function IconvLiteDecoderStream(conv, options) {
- this.conv = conv;
- options = options || {};
- options.encoding = this.encoding = 'utf8'; // We output strings.
- Transform.call(this, options);
- }
+ XMLNode.prototype.importDocument = function(doc) {
+ var clonedRoot;
+ clonedRoot = doc.root().clone();
+ clonedRoot.parent = this;
+ clonedRoot.isRoot = false;
+ this.children.push(clonedRoot);
+ return this;
+ };
- IconvLiteDecoderStream.prototype = Object.create(Transform.prototype, {
- constructor: { value: IconvLiteDecoderStream }
- });
+ XMLNode.prototype.debugInfo = function(name) {
+ var ref2, ref3;
+ name = name || this.name;
+ if ((name == null) && !((ref2 = this.parent) != null ? ref2.name : void 0)) {
+ return "";
+ } else if (name == null) {
+ return "parent: <" + this.parent.name + ">";
+ } else if (!((ref3 = this.parent) != null ? ref3.name : void 0)) {
+ return "node: <" + name + ">";
+ } else {
+ return "node: <" + name + ">, parent: <" + this.parent.name + ">";
+ }
+ };
- IconvLiteDecoderStream.prototype._transform = function(chunk, encoding, done) {
- if (!Buffer.isBuffer(chunk) && !(chunk instanceof Uint8Array))
- return done(new Error("Iconv decoding stream needs buffers as its input."));
- try {
- var res = this.conv.write(chunk);
- if (res && res.length) this.push(res, this.encoding);
- done();
- }
- catch (e) {
- done(e);
- }
- }
+ XMLNode.prototype.ele = function(name, attributes, text) {
+ return this.element(name, attributes, text);
+ };
- IconvLiteDecoderStream.prototype._flush = function(done) {
- try {
- var res = this.conv.end();
- if (res && res.length) this.push(res, this.encoding);
- done();
- }
- catch (e) {
- done(e);
- }
- }
+ XMLNode.prototype.nod = function(name, attributes, text) {
+ return this.node(name, attributes, text);
+ };
- IconvLiteDecoderStream.prototype.collect = function(cb) {
- var res = '';
- this.on('error', cb);
- this.on('data', function(chunk) { res += chunk; });
- this.on('end', function() {
- cb(null, res);
- });
- return this;
- }
+ XMLNode.prototype.txt = function(value) {
+ return this.text(value);
+ };
- return {
- IconvLiteEncoderStream: IconvLiteEncoderStream,
- IconvLiteDecoderStream: IconvLiteDecoderStream,
+ XMLNode.prototype.dat = function(value) {
+ return this.cdata(value);
};
-};
+ XMLNode.prototype.com = function(value) {
+ return this.comment(value);
+ };
-/***/ }),
-/* 314 */,
-/* 315 */
-/***/ (function(module) {
+ XMLNode.prototype.ins = function(target, value) {
+ return this.instruction(target, value);
+ };
-"use strict";
+ XMLNode.prototype.doc = function() {
+ return this.document();
+ };
-module.exports = function(Promise) {
-function returner() {
- return this.value;
-}
-function thrower() {
- throw this.reason;
-}
+ XMLNode.prototype.dec = function(version, encoding, standalone) {
+ return this.declaration(version, encoding, standalone);
+ };
-Promise.prototype["return"] =
-Promise.prototype.thenReturn = function (value) {
- if (value instanceof Promise) value.suppressUnhandledRejections();
- return this._then(
- returner, undefined, undefined, {value: value}, undefined);
-};
+ XMLNode.prototype.e = function(name, attributes, text) {
+ return this.element(name, attributes, text);
+ };
-Promise.prototype["throw"] =
-Promise.prototype.thenThrow = function (reason) {
- return this._then(
- thrower, undefined, undefined, {reason: reason}, undefined);
-};
+ XMLNode.prototype.n = function(name, attributes, text) {
+ return this.node(name, attributes, text);
+ };
-Promise.prototype.catchThrow = function (reason) {
- if (arguments.length <= 1) {
- return this._then(
- undefined, thrower, undefined, {reason: reason}, undefined);
- } else {
- var _reason = arguments[1];
- var handler = function() {throw _reason;};
- return this.caught(reason, handler);
- }
-};
+ XMLNode.prototype.t = function(value) {
+ return this.text(value);
+ };
-Promise.prototype.catchReturn = function (value) {
- if (arguments.length <= 1) {
- if (value instanceof Promise) value.suppressUnhandledRejections();
- return this._then(
- undefined, returner, undefined, {value: value}, undefined);
- } else {
- var _value = arguments[1];
- if (_value instanceof Promise) _value.suppressUnhandledRejections();
- var handler = function() {return _value;};
- return this.caught(value, handler);
- }
-};
-};
+ XMLNode.prototype.d = function(value) {
+ return this.cdata(value);
+ };
+ XMLNode.prototype.c = function(value) {
+ return this.comment(value);
+ };
-/***/ }),
-/* 316 */,
-/* 317 */
-/***/ (function(__unusedmodule, exports) {
+ XMLNode.prototype.r = function(value) {
+ return this.raw(value);
+ };
-var undefined = (void 0); // Paranoia
+ XMLNode.prototype.i = function(target, value) {
+ return this.instruction(target, value);
+ };
-// Beyond this value, index getters/setters (i.e. array[0], array[1]) are so slow to
-// create, and consume so much memory, that the browser appears frozen.
-var MAX_ARRAY_LENGTH = 1e5;
+ XMLNode.prototype.u = function() {
+ return this.up();
+ };
-// Approximations of internal ECMAScript conversion functions
-var ECMAScript = (function() {
- // Stash a copy in case other scripts modify these
- var opts = Object.prototype.toString,
- ophop = Object.prototype.hasOwnProperty;
+ XMLNode.prototype.importXMLBuilder = function(doc) {
+ return this.importDocument(doc);
+ };
- return {
- // Class returns internal [[Class]] property, used to avoid cross-frame instanceof issues:
- Class: function(v) { return opts.call(v).replace(/^\[object *|\]$/g, ''); },
- HasProperty: function(o, p) { return p in o; },
- HasOwnProperty: function(o, p) { return ophop.call(o, p); },
- IsCallable: function(o) { return typeof o === 'function'; },
- ToInt32: function(v) { return v >> 0; },
- ToUint32: function(v) { return v >>> 0; }
- };
-}());
+ XMLNode.prototype.replaceChild = function(newChild, oldChild) {
+ throw new Error("This DOM method is not implemented." + this.debugInfo());
+ };
-// Snapshot intrinsics
-var LN2 = Math.LN2,
- abs = Math.abs,
- floor = Math.floor,
- log = Math.log,
- min = Math.min,
- pow = Math.pow,
- round = Math.round;
+ XMLNode.prototype.removeChild = function(oldChild) {
+ throw new Error("This DOM method is not implemented." + this.debugInfo());
+ };
-// ES5: lock down object properties
-function configureProperties(obj) {
- if (getOwnPropNames && defineProp) {
- var props = getOwnPropNames(obj), i;
- for (i = 0; i < props.length; i += 1) {
- defineProp(obj, props[i], {
- value: obj[props[i]],
- writable: false,
- enumerable: false,
- configurable: false
- });
- }
- }
-}
+ XMLNode.prototype.appendChild = function(newChild) {
+ throw new Error("This DOM method is not implemented." + this.debugInfo());
+ };
-// emulate ES5 getter/setter API using legacy APIs
-// http://blogs.msdn.com/b/ie/archive/2010/09/07/transitioning-existing-code-to-the-es5-getter-setter-apis.aspx
-// (second clause tests for Object.defineProperty() in IE<9 that only supports extending DOM prototypes, but
-// note that IE<9 does not support __defineGetter__ or __defineSetter__ so it just renders the method harmless)
-var defineProp
-if (Object.defineProperty && (function() {
- try {
- Object.defineProperty({}, 'x', {});
- return true;
- } catch (e) {
- return false;
- }
- })()) {
- defineProp = Object.defineProperty;
-} else {
- defineProp = function(o, p, desc) {
- if (!o === Object(o)) throw new TypeError("Object.defineProperty called on non-object");
- if (ECMAScript.HasProperty(desc, 'get') && Object.prototype.__defineGetter__) { Object.prototype.__defineGetter__.call(o, p, desc.get); }
- if (ECMAScript.HasProperty(desc, 'set') && Object.prototype.__defineSetter__) { Object.prototype.__defineSetter__.call(o, p, desc.set); }
- if (ECMAScript.HasProperty(desc, 'value')) { o[p] = desc.value; }
- return o;
- };
-}
+ XMLNode.prototype.hasChildNodes = function() {
+ return this.children.length !== 0;
+ };
-var getOwnPropNames = Object.getOwnPropertyNames || function (o) {
- if (o !== Object(o)) throw new TypeError("Object.getOwnPropertyNames called on non-object");
- var props = [], p;
- for (p in o) {
- if (ECMAScript.HasOwnProperty(o, p)) {
- props.push(p);
- }
- }
- return props;
-};
+ XMLNode.prototype.cloneNode = function(deep) {
+ throw new Error("This DOM method is not implemented." + this.debugInfo());
+ };
-// ES5: Make obj[index] an alias for obj._getter(index)/obj._setter(index, value)
-// for index in 0 ... obj.length
-function makeArrayAccessors(obj) {
- if (!defineProp) { return; }
+ XMLNode.prototype.normalize = function() {
+ throw new Error("This DOM method is not implemented." + this.debugInfo());
+ };
- if (obj.length > MAX_ARRAY_LENGTH) throw new RangeError("Array too large for polyfill");
+ XMLNode.prototype.isSupported = function(feature, version) {
+ return true;
+ };
- function makeArrayAccessor(index) {
- defineProp(obj, index, {
- 'get': function() { return obj._getter(index); },
- 'set': function(v) { obj._setter(index, v); },
- enumerable: true,
- configurable: false
- });
- }
+ XMLNode.prototype.hasAttributes = function() {
+ return this.attribs.length !== 0;
+ };
- var i;
- for (i = 0; i < obj.length; i += 1) {
- makeArrayAccessor(i);
- }
-}
+ XMLNode.prototype.compareDocumentPosition = function(other) {
+ var ref, res;
+ ref = this;
+ if (ref === other) {
+ return 0;
+ } else if (this.document() !== other.document()) {
+ res = DocumentPosition.Disconnected | DocumentPosition.ImplementationSpecific;
+ if (Math.random() < 0.5) {
+ res |= DocumentPosition.Preceding;
+ } else {
+ res |= DocumentPosition.Following;
+ }
+ return res;
+ } else if (ref.isAncestor(other)) {
+ return DocumentPosition.Contains | DocumentPosition.Preceding;
+ } else if (ref.isDescendant(other)) {
+ return DocumentPosition.Contains | DocumentPosition.Following;
+ } else if (ref.isPreceding(other)) {
+ return DocumentPosition.Preceding;
+ } else {
+ return DocumentPosition.Following;
+ }
+ };
-// Internal conversion functions:
-// pack() - take a number (interpreted as Type), output a byte array
-// unpack() - take a byte array, output a Type-like number
+ XMLNode.prototype.isSameNode = function(other) {
+ throw new Error("This DOM method is not implemented." + this.debugInfo());
+ };
-function as_signed(value, bits) { var s = 32 - bits; return (value << s) >> s; }
-function as_unsigned(value, bits) { var s = 32 - bits; return (value << s) >>> s; }
+ XMLNode.prototype.lookupPrefix = function(namespaceURI) {
+ throw new Error("This DOM method is not implemented." + this.debugInfo());
+ };
-function packI8(n) { return [n & 0xff]; }
-function unpackI8(bytes) { return as_signed(bytes[0], 8); }
+ XMLNode.prototype.isDefaultNamespace = function(namespaceURI) {
+ throw new Error("This DOM method is not implemented." + this.debugInfo());
+ };
-function packU8(n) { return [n & 0xff]; }
-function unpackU8(bytes) { return as_unsigned(bytes[0], 8); }
+ XMLNode.prototype.lookupNamespaceURI = function(prefix) {
+ throw new Error("This DOM method is not implemented." + this.debugInfo());
+ };
-function packU8Clamped(n) { n = round(Number(n)); return [n < 0 ? 0 : n > 0xff ? 0xff : n & 0xff]; }
+ XMLNode.prototype.isEqualNode = function(node) {
+ var i, j, ref2;
+ if (node.nodeType !== this.nodeType) {
+ return false;
+ }
+ if (node.children.length !== this.children.length) {
+ return false;
+ }
+ for (i = j = 0, ref2 = this.children.length - 1; 0 <= ref2 ? j <= ref2 : j >= ref2; i = 0 <= ref2 ? ++j : --j) {
+ if (!this.children[i].isEqualNode(node.children[i])) {
+ return false;
+ }
+ }
+ return true;
+ };
-function packI16(n) { return [(n >> 8) & 0xff, n & 0xff]; }
-function unpackI16(bytes) { return as_signed(bytes[0] << 8 | bytes[1], 16); }
+ XMLNode.prototype.getFeature = function(feature, version) {
+ throw new Error("This DOM method is not implemented." + this.debugInfo());
+ };
-function packU16(n) { return [(n >> 8) & 0xff, n & 0xff]; }
-function unpackU16(bytes) { return as_unsigned(bytes[0] << 8 | bytes[1], 16); }
+ XMLNode.prototype.setUserData = function(key, data, handler) {
+ throw new Error("This DOM method is not implemented." + this.debugInfo());
+ };
-function packI32(n) { return [(n >> 24) & 0xff, (n >> 16) & 0xff, (n >> 8) & 0xff, n & 0xff]; }
-function unpackI32(bytes) { return as_signed(bytes[0] << 24 | bytes[1] << 16 | bytes[2] << 8 | bytes[3], 32); }
+ XMLNode.prototype.getUserData = function(key) {
+ throw new Error("This DOM method is not implemented." + this.debugInfo());
+ };
-function packU32(n) { return [(n >> 24) & 0xff, (n >> 16) & 0xff, (n >> 8) & 0xff, n & 0xff]; }
-function unpackU32(bytes) { return as_unsigned(bytes[0] << 24 | bytes[1] << 16 | bytes[2] << 8 | bytes[3], 32); }
+ XMLNode.prototype.contains = function(other) {
+ if (!other) {
+ return false;
+ }
+ return other === this || this.isDescendant(other);
+ };
-function packIEEE754(v, ebits, fbits) {
+ XMLNode.prototype.isDescendant = function(node) {
+ var child, isDescendantChild, j, len, ref2;
+ ref2 = this.children;
+ for (j = 0, len = ref2.length; j < len; j++) {
+ child = ref2[j];
+ if (node === child) {
+ return true;
+ }
+ isDescendantChild = child.isDescendant(node);
+ if (isDescendantChild) {
+ return true;
+ }
+ }
+ return false;
+ };
- var bias = (1 << (ebits - 1)) - 1,
- s, e, f, ln,
- i, bits, str, bytes;
-
- function roundToEven(n) {
- var w = floor(n), f = n - w;
- if (f < 0.5)
- return w;
- if (f > 0.5)
- return w + 1;
- return w % 2 ? w + 1 : w;
- }
-
- // Compute sign, exponent, fraction
- if (v !== v) {
- // NaN
- // http://dev.w3.org/2006/webapi/WebIDL/#es-type-mapping
- e = (1 << ebits) - 1; f = pow(2, fbits - 1); s = 0;
- } else if (v === Infinity || v === -Infinity) {
- e = (1 << ebits) - 1; f = 0; s = (v < 0) ? 1 : 0;
- } else if (v === 0) {
- e = 0; f = 0; s = (1 / v === -Infinity) ? 1 : 0;
- } else {
- s = v < 0;
- v = abs(v);
+ XMLNode.prototype.isAncestor = function(node) {
+ return node.isDescendant(this);
+ };
- if (v >= pow(2, 1 - bias)) {
- e = min(floor(log(v) / LN2), 1023);
- f = roundToEven(v / pow(2, e) * pow(2, fbits));
- if (f / pow(2, fbits) >= 2) {
- e = e + 1;
- f = 1;
- }
- if (e > bias) {
- // Overflow
- e = (1 << ebits) - 1;
- f = 0;
+ XMLNode.prototype.isPreceding = function(node) {
+ var nodePos, thisPos;
+ nodePos = this.treePosition(node);
+ thisPos = this.treePosition(this);
+ if (nodePos === -1 || thisPos === -1) {
+ return false;
} else {
- // Normalized
- e = e + bias;
- f = f - pow(2, fbits);
+ return nodePos < thisPos;
}
- } else {
- // Denormalized
- e = 0;
- f = roundToEven(v / pow(2, 1 - bias - fbits));
- }
- }
-
- // Pack sign, exponent, fraction
- bits = [];
- for (i = fbits; i; i -= 1) { bits.push(f % 2 ? 1 : 0); f = floor(f / 2); }
- for (i = ebits; i; i -= 1) { bits.push(e % 2 ? 1 : 0); e = floor(e / 2); }
- bits.push(s ? 1 : 0);
- bits.reverse();
- str = bits.join('');
-
- // Bits to bytes
- bytes = [];
- while (str.length) {
- bytes.push(parseInt(str.substring(0, 8), 2));
- str = str.substring(8);
- }
- return bytes;
-}
+ };
-function unpackIEEE754(bytes, ebits, fbits) {
+ XMLNode.prototype.isFollowing = function(node) {
+ var nodePos, thisPos;
+ nodePos = this.treePosition(node);
+ thisPos = this.treePosition(this);
+ if (nodePos === -1 || thisPos === -1) {
+ return false;
+ } else {
+ return nodePos > thisPos;
+ }
+ };
- // Bytes to bits
- var bits = [], i, j, b, str,
- bias, s, e, f;
+ XMLNode.prototype.treePosition = function(node) {
+ var found, pos;
+ pos = 0;
+ found = false;
+ this.foreachTreeNode(this.document(), function(childNode) {
+ pos++;
+ if (!found && childNode === node) {
+ return found = true;
+ }
+ });
+ if (found) {
+ return pos;
+ } else {
+ return -1;
+ }
+ };
- for (i = bytes.length; i; i -= 1) {
- b = bytes[i - 1];
- for (j = 8; j; j -= 1) {
- bits.push(b % 2 ? 1 : 0); b = b >> 1;
- }
- }
- bits.reverse();
- str = bits.join('');
+ XMLNode.prototype.foreachTreeNode = function(node, func) {
+ var child, j, len, ref2, res;
+ node || (node = this.document());
+ ref2 = node.children;
+ for (j = 0, len = ref2.length; j < len; j++) {
+ child = ref2[j];
+ if (res = func(child)) {
+ return res;
+ } else {
+ res = this.foreachTreeNode(child, func);
+ if (res) {
+ return res;
+ }
+ }
+ }
+ };
- // Unpack sign, exponent, fraction
- bias = (1 << (ebits - 1)) - 1;
- s = parseInt(str.substring(0, 1), 2) ? -1 : 1;
- e = parseInt(str.substring(1, 1 + ebits), 2);
- f = parseInt(str.substring(1 + ebits), 2);
+ return XMLNode;
- // Produce number
- if (e === (1 << ebits) - 1) {
- return f !== 0 ? NaN : s * Infinity;
- } else if (e > 0) {
- // Normalized
- return s * pow(2, e - bias) * (1 + f / pow(2, fbits));
- } else if (f !== 0) {
- // Denormalized
- return s * pow(2, -(bias - 1)) * (f / pow(2, fbits));
- } else {
- return s < 0 ? -0 : 0;
- }
-}
+ })();
-function unpackF64(b) { return unpackIEEE754(b, 11, 52); }
-function packF64(v) { return packIEEE754(v, 11, 52); }
-function unpackF32(b) { return unpackIEEE754(b, 8, 23); }
-function packF32(v) { return packIEEE754(v, 8, 23); }
+}).call(this);
-//
-// 3 The ArrayBuffer Type
-//
+/***/ }),
+/* 258 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-(function() {
+"use strict";
- /** @constructor */
- var ArrayBuffer = function ArrayBuffer(length) {
- length = ECMAScript.ToInt32(length);
- if (length < 0) throw new RangeError('ArrayBuffer size is not a small enough positive integer');
- this.byteLength = length;
- this._bytes = [];
- this._bytes.length = length;
+const ls = __webpack_require__(14)
+const get = __webpack_require__(425)
+const put = __webpack_require__(154)
+const rm = __webpack_require__(435)
+const verify = __webpack_require__(290)
+const setLocale = __webpack_require__(945).setLocale
+const clearMemoized = __webpack_require__(521).clearMemoized
+const tmp = __webpack_require__(862)
- var i;
- for (i = 0; i < this.byteLength; i += 1) {
- this._bytes[i] = 0;
- }
+setLocale('en')
- configureProperties(this);
- };
+const x = module.exports
- exports.ArrayBuffer = exports.ArrayBuffer || ArrayBuffer;
+x.ls = cache => ls(cache)
+x.ls.stream = cache => ls.stream(cache)
- //
- // 4 The ArrayBufferView Type
- //
+x.get = (cache, key, opts) => get(cache, key, opts)
+x.get.byDigest = (cache, hash, opts) => get.byDigest(cache, hash, opts)
+x.get.sync = (cache, key, opts) => get.sync(cache, key, opts)
+x.get.sync.byDigest = (cache, key, opts) => get.sync.byDigest(cache, key, opts)
+x.get.stream = (cache, key, opts) => get.stream(cache, key, opts)
+x.get.stream.byDigest = (cache, hash, opts) => get.stream.byDigest(cache, hash, opts)
+x.get.copy = (cache, key, dest, opts) => get.copy(cache, key, dest, opts)
+x.get.copy.byDigest = (cache, hash, dest, opts) => get.copy.byDigest(cache, hash, dest, opts)
+x.get.info = (cache, key) => get.info(cache, key)
+x.get.hasContent = (cache, hash) => get.hasContent(cache, hash)
+x.get.hasContent.sync = (cache, hash) => get.hasContent.sync(cache, hash)
- // NOTE: this constructor is not exported
- /** @constructor */
- var ArrayBufferView = function ArrayBufferView() {
- //this.buffer = null;
- //this.byteOffset = 0;
- //this.byteLength = 0;
- };
+x.put = (cache, key, data, opts) => put(cache, key, data, opts)
+x.put.stream = (cache, key, opts) => put.stream(cache, key, opts)
- //
- // 5 The Typed Array View Types
- //
+x.rm = (cache, key) => rm.entry(cache, key)
+x.rm.all = cache => rm.all(cache)
+x.rm.entry = x.rm
+x.rm.content = (cache, hash) => rm.content(cache, hash)
- function makeConstructor(bytesPerElement, pack, unpack) {
- // Each TypedArray type requires a distinct constructor instance with
- // identical logic, which this produces.
+x.setLocale = lang => setLocale(lang)
+x.clearMemoized = () => clearMemoized()
- var ctor;
- ctor = function(buffer, byteOffset, length) {
- var array, sequence, i, s;
+x.tmp = {}
+x.tmp.mkdir = (cache, opts) => tmp.mkdir(cache, opts)
+x.tmp.withTmp = (cache, opts, cb) => tmp.withTmp(cache, opts, cb)
- if (!arguments.length || typeof arguments[0] === 'number') {
- // Constructor(unsigned long length)
- this.length = ECMAScript.ToInt32(arguments[0]);
- if (length < 0) throw new RangeError('ArrayBufferView size is not a small enough positive integer');
+x.verify = (cache, opts) => verify(cache, opts)
+x.verify.lastRun = cache => verify.lastRun(cache)
- this.byteLength = this.length * this.BYTES_PER_ELEMENT;
- this.buffer = new ArrayBuffer(this.byteLength);
- this.byteOffset = 0;
- } else if (typeof arguments[0] === 'object' && arguments[0].constructor === ctor) {
- // Constructor(TypedArray array)
- array = arguments[0];
- this.length = array.length;
- this.byteLength = this.length * this.BYTES_PER_ELEMENT;
- this.buffer = new ArrayBuffer(this.byteLength);
- this.byteOffset = 0;
+/***/ }),
+/* 259 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
- for (i = 0; i < this.length; i += 1) {
- this._setter(i, array._getter(i));
- }
- } else if (typeof arguments[0] === 'object' &&
- !(arguments[0] instanceof ArrayBuffer || ECMAScript.Class(arguments[0]) === 'ArrayBuffer')) {
- // Constructor(sequence array)
- sequence = arguments[0];
+const Range = __webpack_require__(124)
+const intersects = (r1, r2, options) => {
+ r1 = new Range(r1, options)
+ r2 = new Range(r2, options)
+ return r1.intersects(r2)
+}
+module.exports = intersects
- this.length = ECMAScript.ToUint32(sequence.length);
- this.byteLength = this.length * this.BYTES_PER_ELEMENT;
- this.buffer = new ArrayBuffer(this.byteLength);
- this.byteOffset = 0;
- for (i = 0; i < this.length; i += 1) {
- s = sequence[i];
- this._setter(i, Number(s));
- }
- } else if (typeof arguments[0] === 'object' &&
- (arguments[0] instanceof ArrayBuffer || ECMAScript.Class(arguments[0]) === 'ArrayBuffer')) {
- // Constructor(ArrayBuffer buffer,
- // optional unsigned long byteOffset, optional unsigned long length)
- this.buffer = buffer;
+/***/ }),
+/* 260 */
+/***/ (function(module, exports, __webpack_require__) {
- this.byteOffset = ECMAScript.ToUint32(byteOffset);
- if (this.byteOffset > this.buffer.byteLength) {
- throw new RangeError("byteOffset out of range");
- }
+"use strict";
- if (this.byteOffset % this.BYTES_PER_ELEMENT) {
- // The given byteOffset must be a multiple of the element
- // size of the specific type, otherwise an exception is raised.
- throw new RangeError("ArrayBuffer length minus the byteOffset is not a multiple of the element size.");
- }
- if (arguments.length < 3) {
- this.byteLength = this.buffer.byteLength - this.byteOffset;
+exports = module.exports = lifecycle
+exports.makeEnv = makeEnv
+exports._incorrectWorkingDirectory = _incorrectWorkingDirectory
- if (this.byteLength % this.BYTES_PER_ELEMENT) {
- throw new RangeError("length of buffer minus byteOffset not a multiple of the element size");
- }
- this.length = this.byteLength / this.BYTES_PER_ELEMENT;
- } else {
- this.length = ECMAScript.ToUint32(length);
- this.byteLength = this.length * this.BYTES_PER_ELEMENT;
- }
+// for testing
+const platform = process.env.__TESTING_FAKE_PLATFORM__ || process.platform
+const isWindows = platform === 'win32'
+const spawn = __webpack_require__(128)
+const path = __webpack_require__(622)
+const Stream = __webpack_require__(794).Stream
+const fs = __webpack_require__(598)
+const chain = __webpack_require__(433).chain
+const uidNumber = __webpack_require__(322)
+const umask = __webpack_require__(696)
+const which = __webpack_require__(142)
+const byline = __webpack_require__(861)
+const resolveFrom = __webpack_require__(484)
- if ((this.byteOffset + this.byteLength) > this.buffer.byteLength) {
- throw new RangeError("byteOffset and length reference an area beyond the end of the buffer");
- }
- } else {
- throw new TypeError("Unexpected argument type(s)");
- }
+const DEFAULT_NODE_GYP_PATH = /*require.resolve*/( 693)
+const hookStatCache = new Map()
- this.constructor = ctor;
+let PATH = isWindows ? 'Path' : 'PATH'
+exports._pathEnvName = PATH
+const delimiter = path.delimiter
- configureProperties(this);
- makeArrayAccessors(this);
- };
+// windows calls its path 'Path' usually, but this is not guaranteed.
+// merge them all together in the order they appear in the object.
+const mergePath = env =>
+ Object.keys(env).filter(p => /^path$/i.test(p) && env[p])
+ .map(p => env[p].split(delimiter))
+ .reduce((set, p) => set.concat(p.filter(p => !set.includes(p))), [])
+ .join(delimiter)
+exports._mergePath = mergePath
- ctor.prototype = new ArrayBufferView();
- ctor.prototype.BYTES_PER_ELEMENT = bytesPerElement;
- ctor.prototype._pack = pack;
- ctor.prototype._unpack = unpack;
- ctor.BYTES_PER_ELEMENT = bytesPerElement;
+const setPathEnv = (env, path) => {
+ // first ensure that the canonical value is set.
+ env[PATH] = path
+ // also set any other case values, because windows.
+ Object.keys(env)
+ .filter(p => p !== PATH && /^path$/i.test(p))
+ .forEach(p => { env[p] = path })
+}
+exports._setPathEnv = setPathEnv
- // getter type (unsigned long index);
- ctor.prototype._getter = function(index) {
- if (arguments.length < 1) throw new SyntaxError("Not enough arguments");
+function logid (pkg, stage) {
+ return pkg._id + '~' + stage + ':'
+}
- index = ECMAScript.ToUint32(index);
- if (index >= this.length) {
- return undefined;
- }
+function hookStat (dir, stage, cb) {
+ const hook = path.join(dir, '.hooks', stage)
+ const cachedStatError = hookStatCache.get(hook)
- var bytes = [], i, o;
- for (i = 0, o = this.byteOffset + index * this.BYTES_PER_ELEMENT;
- i < this.BYTES_PER_ELEMENT;
- i += 1, o += 1) {
- bytes.push(this.buffer._bytes[o]);
- }
- return this._unpack(bytes);
- };
+ if (cachedStatError === undefined) {
+ return fs.stat(hook, function (statError) {
+ hookStatCache.set(hook, statError)
+ cb(statError)
+ })
+ }
- // NONSTANDARD: convenience alias for getter: type get(unsigned long index);
- ctor.prototype.get = ctor.prototype._getter;
+ return setImmediate(() => cb(cachedStatError))
+}
- // setter void (unsigned long index, type value);
- ctor.prototype._setter = function(index, value) {
- if (arguments.length < 2) throw new SyntaxError("Not enough arguments");
+function lifecycle (pkg, stage, wd, opts) {
+ return new Promise((resolve, reject) => {
+ while (pkg && pkg._data) pkg = pkg._data
+ if (!pkg) return reject(new Error('Invalid package data'))
- index = ECMAScript.ToUint32(index);
- if (index >= this.length) {
- return undefined;
- }
+ opts.log.info('lifecycle', logid(pkg, stage), pkg._id)
+ if (!pkg.scripts) pkg.scripts = {}
- var bytes = this._pack(value), i, o;
- for (i = 0, o = this.byteOffset + index * this.BYTES_PER_ELEMENT;
- i < this.BYTES_PER_ELEMENT;
- i += 1, o += 1) {
- this.buffer._bytes[o] = bytes[i];
- }
- };
+ if (stage === 'prepublish' && opts.ignorePrepublish) {
+ opts.log.info('lifecycle', logid(pkg, stage), 'ignored because ignore-prepublish is set to true', pkg._id)
+ delete pkg.scripts.prepublish
+ }
- // void set(TypedArray array, optional unsigned long offset);
- // void set(sequence array, optional unsigned long offset);
- ctor.prototype.set = function(index, value) {
- if (arguments.length < 1) throw new SyntaxError("Not enough arguments");
- var array, sequence, offset, len,
- i, s, d,
- byteOffset, byteLength, tmp;
+ hookStat(opts.dir, stage, function (statError) {
+ // makeEnv is a slow operation. This guard clause prevents makeEnv being called
+ // and avoids a ton of unnecessary work, and results in a major perf boost.
+ if (!pkg.scripts[stage] && statError) return resolve()
- if (typeof arguments[0] === 'object' && arguments[0].constructor === this.constructor) {
- // void set(TypedArray array, optional unsigned long offset);
- array = arguments[0];
- offset = ECMAScript.ToUint32(arguments[1]);
+ validWd(wd || path.resolve(opts.dir, pkg.name), function (er, wd) {
+ if (er) return reject(er)
- if (offset + array.length > this.length) {
- throw new RangeError("Offset plus length of array is out of range");
+ if ((wd.indexOf(opts.dir) !== 0 || _incorrectWorkingDirectory(wd, pkg)) &&
+ !opts.unsafePerm && pkg.scripts[stage]) {
+ opts.log.warn('lifecycle', logid(pkg, stage), 'cannot run in wd', pkg._id, pkg.scripts[stage], `(wd=${wd})`)
+ return resolve()
}
- byteOffset = this.byteOffset + offset * this.BYTES_PER_ELEMENT;
- byteLength = array.length * this.BYTES_PER_ELEMENT;
+ // set the env variables, then run scripts as a child process.
+ var env = makeEnv(pkg, opts)
+ env.npm_lifecycle_event = stage
+ env.npm_node_execpath = env.NODE = env.NODE || process.execPath
+ env.npm_execpath = require.main.filename
+ env.INIT_CWD = process.cwd()
+ env.npm_config_node_gyp = env.npm_config_node_gyp || DEFAULT_NODE_GYP_PATH
- if (array.buffer === this.buffer) {
- tmp = [];
- for (i = 0, s = array.byteOffset; i < byteLength; i += 1, s += 1) {
- tmp[i] = array.buffer._bytes[s];
- }
- for (i = 0, d = byteOffset; i < byteLength; i += 1, d += 1) {
- this.buffer._bytes[d] = tmp[i];
- }
- } else {
- for (i = 0, s = array.byteOffset, d = byteOffset;
- i < byteLength; i += 1, s += 1, d += 1) {
- this.buffer._bytes[d] = array.buffer._bytes[s];
- }
- }
- } else if (typeof arguments[0] === 'object' && typeof arguments[0].length !== 'undefined') {
- // void set(sequence array, optional unsigned long offset);
- sequence = arguments[0];
- len = ECMAScript.ToUint32(sequence.length);
- offset = ECMAScript.ToUint32(arguments[1]);
+ // 'nobody' typically doesn't have permission to write to /tmp
+ // even if it's never used, sh freaks out.
+ if (!opts.unsafePerm) env.TMPDIR = wd
- if (offset + len > this.length) {
- throw new RangeError("Offset plus length of array is out of range");
- }
+ lifecycle_(pkg, stage, wd, opts, env, (er) => {
+ if (er) return reject(er)
+ return resolve()
+ })
+ })
+ })
+ })
+}
- for (i = 0; i < len; i += 1) {
- s = sequence[i];
- this._setter(offset + i, Number(s));
- }
- } else {
- throw new TypeError("Unexpected argument type(s)");
- }
- };
+function _incorrectWorkingDirectory (wd, pkg) {
+ return wd.lastIndexOf(pkg.name) !== wd.length - pkg.name.length
+}
- // TypedArray subarray(long begin, optional long end);
- ctor.prototype.subarray = function(start, end) {
- function clamp(v, min, max) { return v < min ? min : v > max ? max : v; }
+function lifecycle_ (pkg, stage, wd, opts, env, cb) {
+ var pathArr = []
+ var p = wd.split(/[\\/]node_modules[\\/]/)
+ var acc = path.resolve(p.shift())
- start = ECMAScript.ToInt32(start);
- end = ECMAScript.ToInt32(end);
+ p.forEach(function (pp) {
+ pathArr.unshift(path.join(acc, 'node_modules', '.bin'))
+ acc = path.join(acc, 'node_modules', pp)
+ })
+ pathArr.unshift(path.join(acc, 'node_modules', '.bin'))
- if (arguments.length < 1) { start = 0; }
- if (arguments.length < 2) { end = this.length; }
+ // we also unshift the bundled node-gyp-bin folder so that
+ // the bundled one will be used for installing things.
+ pathArr.unshift(path.join(__dirname, 'node-gyp-bin'))
- if (start < 0) { start = this.length + start; }
- if (end < 0) { end = this.length + end; }
+ if (shouldPrependCurrentNodeDirToPATH(opts)) {
+ // prefer current node interpreter in child scripts
+ pathArr.push(path.dirname(process.execPath))
+ }
- start = clamp(start, 0, this.length);
- end = clamp(end, 0, this.length);
+ const existingPath = mergePath(env)
+ if (existingPath) pathArr.push(existingPath)
+ const envPath = pathArr.join(isWindows ? ';' : ':')
+ setPathEnv(env, envPath)
- var len = end - start;
- if (len < 0) {
- len = 0;
- }
+ var packageLifecycle = pkg.scripts && pkg.scripts.hasOwnProperty(stage)
- return new this.constructor(
- this.buffer, this.byteOffset + start * this.BYTES_PER_ELEMENT, len);
- };
+ if (opts.ignoreScripts) {
+ opts.log.info('lifecycle', logid(pkg, stage), 'ignored because ignore-scripts is set to true', pkg._id)
+ packageLifecycle = false
+ } else if (packageLifecycle) {
+ // define this here so it's available to all scripts.
+ env.npm_lifecycle_script = pkg.scripts[stage]
+ } else {
+ opts.log.silly('lifecycle', logid(pkg, stage), 'no script for ' + stage + ', continuing')
+ }
- return ctor;
+ function done (er) {
+ if (er) {
+ if (opts.force) {
+ opts.log.info('lifecycle', logid(pkg, stage), 'forced, continuing', er)
+ er = null
+ } else if (opts.failOk) {
+ opts.log.warn('lifecycle', logid(pkg, stage), 'continuing anyway', er.message)
+ er = null
+ }
+ }
+ cb(er)
}
- var Int8Array = makeConstructor(1, packI8, unpackI8);
- var Uint8Array = makeConstructor(1, packU8, unpackU8);
- var Uint8ClampedArray = makeConstructor(1, packU8Clamped, unpackU8);
- var Int16Array = makeConstructor(2, packI16, unpackI16);
- var Uint16Array = makeConstructor(2, packU16, unpackU16);
- var Int32Array = makeConstructor(4, packI32, unpackI32);
- var Uint32Array = makeConstructor(4, packU32, unpackU32);
- var Float32Array = makeConstructor(4, packF32, unpackF32);
- var Float64Array = makeConstructor(8, packF64, unpackF64);
+ chain(
+ [
+ packageLifecycle && [runPackageLifecycle, pkg, stage, env, wd, opts],
+ [runHookLifecycle, pkg, stage, env, wd, opts]
+ ],
+ done
+ )
+}
- exports.Int8Array = exports.Int8Array || Int8Array;
- exports.Uint8Array = exports.Uint8Array || Uint8Array;
- exports.Uint8ClampedArray = exports.Uint8ClampedArray || Uint8ClampedArray;
- exports.Int16Array = exports.Int16Array || Int16Array;
- exports.Uint16Array = exports.Uint16Array || Uint16Array;
- exports.Int32Array = exports.Int32Array || Int32Array;
- exports.Uint32Array = exports.Uint32Array || Uint32Array;
- exports.Float32Array = exports.Float32Array || Float32Array;
- exports.Float64Array = exports.Float64Array || Float64Array;
-}());
+function shouldPrependCurrentNodeDirToPATH (opts) {
+ const cfgsetting = opts.scriptsPrependNodePath
+ if (cfgsetting === false) return false
+ if (cfgsetting === true) return true
-//
-// 6 The DataView View Type
-//
+ var isDifferentNodeInPath
-(function() {
- function r(array, index) {
- return ECMAScript.IsCallable(array.get) ? array.get(index) : array[index];
+ var foundExecPath
+ try {
+ foundExecPath = which.sync(path.basename(process.execPath), { pathExt: isWindows ? ';' : ':' })
+ // Apply `fs.realpath()` here to avoid false positives when `node` is a symlinked executable.
+ isDifferentNodeInPath = fs.realpathSync(process.execPath).toUpperCase() !==
+ fs.realpathSync(foundExecPath).toUpperCase()
+ } catch (e) {
+ isDifferentNodeInPath = true
}
- var IS_BIG_ENDIAN = (function() {
- var u16array = new(exports.Uint16Array)([0x1234]),
- u8array = new(exports.Uint8Array)(u16array.buffer);
- return r(u8array, 0) === 0x12;
- }());
-
- // Constructor(ArrayBuffer buffer,
- // optional unsigned long byteOffset,
- // optional unsigned long byteLength)
- /** @constructor */
- var DataView = function DataView(buffer, byteOffset, byteLength) {
- if (arguments.length === 0) {
- buffer = new exports.ArrayBuffer(0);
- } else if (!(buffer instanceof exports.ArrayBuffer || ECMAScript.Class(buffer) === 'ArrayBuffer')) {
- throw new TypeError("TypeError");
+ if (cfgsetting === 'warn-only') {
+ if (isDifferentNodeInPath && !shouldPrependCurrentNodeDirToPATH.hasWarned) {
+ if (foundExecPath) {
+ opts.log.warn('lifecycle', 'The node binary used for scripts is', foundExecPath, 'but npm is using', process.execPath, 'itself. Use the `--scripts-prepend-node-path` option to include the path for the node binary npm was executed with.')
+ } else {
+ opts.log.warn('lifecycle', 'npm is using', process.execPath, 'but there is no node binary in the current PATH. Use the `--scripts-prepend-node-path` option to include the path for the node binary npm was executed with.')
+ }
+ shouldPrependCurrentNodeDirToPATH.hasWarned = true
}
- this.buffer = buffer || new exports.ArrayBuffer(0);
+ return false
+ }
- this.byteOffset = ECMAScript.ToUint32(byteOffset);
- if (this.byteOffset > this.buffer.byteLength) {
- throw new RangeError("byteOffset out of range");
- }
+ return isDifferentNodeInPath
+}
- if (arguments.length < 3) {
- this.byteLength = this.buffer.byteLength - this.byteOffset;
- } else {
- this.byteLength = ECMAScript.ToUint32(byteLength);
+function validWd (d, cb) {
+ fs.stat(d, function (er, st) {
+ if (er || !st.isDirectory()) {
+ var p = path.dirname(d)
+ if (p === d) {
+ return cb(new Error('Could not find suitable wd'))
+ }
+ return validWd(p, cb)
}
+ return cb(null, d)
+ })
+}
- if ((this.byteOffset + this.byteLength) > this.buffer.byteLength) {
- throw new RangeError("byteOffset and length reference an area beyond the end of the buffer");
- }
+function runPackageLifecycle (pkg, stage, env, wd, opts, cb) {
+ // run package lifecycle scripts in the package root, or the nearest parent.
+ var cmd = env.npm_lifecycle_script
- configureProperties(this);
- };
+ var note = '\n> ' + pkg._id + ' ' + stage + ' ' + wd +
+ '\n> ' + cmd + '\n'
+ runCmd(note, cmd, pkg, env, stage, wd, opts, cb)
+}
- function makeGetter(arrayType) {
- return function(byteOffset, littleEndian) {
+var running = false
+var queue = []
+function dequeue () {
+ running = false
+ if (queue.length) {
+ var r = queue.shift()
+ runCmd.apply(null, r)
+ }
+}
- byteOffset = ECMAScript.ToUint32(byteOffset);
+function runCmd (note, cmd, pkg, env, stage, wd, opts, cb) {
+ if (running) {
+ queue.push([note, cmd, pkg, env, stage, wd, opts, cb])
+ return
+ }
- if (byteOffset + arrayType.BYTES_PER_ELEMENT > this.byteLength) {
- throw new RangeError("Array index out of range");
- }
- byteOffset += this.byteOffset;
+ running = true
+ opts.log.pause()
+ var unsafe = opts.unsafePerm
+ var user = unsafe ? null : opts.user
+ var group = unsafe ? null : opts.group
- var uint8Array = new exports.Uint8Array(this.buffer, byteOffset, arrayType.BYTES_PER_ELEMENT),
- bytes = [], i;
- for (i = 0; i < arrayType.BYTES_PER_ELEMENT; i += 1) {
- bytes.push(r(uint8Array, i));
- }
+ if (opts.log.level !== 'silent') {
+ opts.log.clearProgress()
+ console.log(note)
+ opts.log.showProgress()
+ }
+ opts.log.verbose('lifecycle', logid(pkg, stage), 'unsafe-perm in lifecycle', unsafe)
- if (Boolean(littleEndian) === Boolean(IS_BIG_ENDIAN)) {
- bytes.reverse();
+ if (isWindows) {
+ unsafe = true
+ }
+
+ if (unsafe) {
+ runCmd_(cmd, pkg, env, wd, opts, stage, unsafe, 0, 0, cb)
+ } else {
+ uidNumber(user, group, function (er, uid, gid) {
+ if (er) {
+ er.code = 'EUIDLOOKUP'
+ opts.log.resume()
+ process.nextTick(dequeue)
+ return cb(er)
}
+ runCmd_(cmd, pkg, env, wd, opts, stage, unsafe, uid, gid, cb)
+ })
+ }
+}
- return r(new arrayType(new exports.Uint8Array(bytes).buffer), 0);
- };
+const getSpawnArgs = ({ cmd, wd, opts, uid, gid, unsafe, env }) => {
+ const conf = {
+ cwd: wd,
+ env: env,
+ stdio: opts.stdio || [ 0, 1, 2 ]
}
- DataView.prototype.getUint8 = makeGetter(exports.Uint8Array);
- DataView.prototype.getInt8 = makeGetter(exports.Int8Array);
- DataView.prototype.getUint16 = makeGetter(exports.Uint16Array);
- DataView.prototype.getInt16 = makeGetter(exports.Int16Array);
- DataView.prototype.getUint32 = makeGetter(exports.Uint32Array);
- DataView.prototype.getInt32 = makeGetter(exports.Int32Array);
- DataView.prototype.getFloat32 = makeGetter(exports.Float32Array);
- DataView.prototype.getFloat64 = makeGetter(exports.Float64Array);
+ if (!unsafe) {
+ conf.uid = uid ^ 0
+ conf.gid = gid ^ 0
+ }
- function makeSetter(arrayType) {
- return function(byteOffset, value, littleEndian) {
+ const customShell = opts.scriptShell
- byteOffset = ECMAScript.ToUint32(byteOffset);
- if (byteOffset + arrayType.BYTES_PER_ELEMENT > this.byteLength) {
- throw new RangeError("Array index out of range");
+ let sh = 'sh'
+ let shFlag = '-c'
+ if (customShell) {
+ sh = customShell
+ } else if (isWindows || opts._TESTING_FAKE_WINDOWS_) {
+ sh = process.env.comspec || 'cmd'
+ // '/d /s /c' is used only for cmd.exe.
+ if (/^(?:.*\\)?cmd(?:\.exe)?$/i.test(sh)) {
+ shFlag = '/d /s /c'
+ conf.windowsVerbatimArguments = true
+ }
+ }
+
+ return [sh, [shFlag, cmd], conf]
+}
+
+exports._getSpawnArgs = getSpawnArgs
+
+function runCmd_ (cmd, pkg, env, wd, opts, stage, unsafe, uid, gid, cb_) {
+ function cb (er) {
+ cb_.apply(null, arguments)
+ opts.log.resume()
+ process.nextTick(dequeue)
+ }
+
+ const [sh, args, conf] = getSpawnArgs({ cmd, wd, opts, uid, gid, unsafe, env })
+
+ opts.log.verbose('lifecycle', logid(pkg, stage), 'PATH:', env[PATH])
+ opts.log.verbose('lifecycle', logid(pkg, stage), 'CWD:', wd)
+ opts.log.silly('lifecycle', logid(pkg, stage), 'Args:', args)
+
+ var proc = spawn(sh, args, conf, opts.log)
+
+ proc.on('error', procError)
+ proc.on('close', function (code, signal) {
+ opts.log.silly('lifecycle', logid(pkg, stage), 'Returned: code:', code, ' signal:', signal)
+ if (signal) {
+ process.kill(process.pid, signal)
+ } else if (code) {
+ var er = new Error('Exit status ' + code)
+ er.errno = code
+ }
+ procError(er)
+ })
+ byline(proc.stdout).on('data', function (data) {
+ opts.log.verbose('lifecycle', logid(pkg, stage), 'stdout', data.toString())
+ })
+ byline(proc.stderr).on('data', function (data) {
+ opts.log.verbose('lifecycle', logid(pkg, stage), 'stderr', data.toString())
+ })
+ process.once('SIGTERM', procKill)
+ process.once('SIGINT', procInterupt)
+
+ function procError (er) {
+ if (er) {
+ opts.log.info('lifecycle', logid(pkg, stage), 'Failed to exec ' + stage + ' script')
+ er.message = pkg._id + ' ' + stage + ': `' + cmd + '`\n' +
+ er.message
+ if (er.code !== 'EPERM') {
+ er.code = 'ELIFECYCLE'
}
+ fs.stat(opts.dir, function (statError, d) {
+ if (statError && statError.code === 'ENOENT' && opts.dir.split(path.sep).slice(-1)[0] === 'node_modules') {
+ opts.log.warn('', 'Local package.json exists, but node_modules missing, did you mean to install?')
+ }
+ })
+ er.pkgid = pkg._id
+ er.stage = stage
+ er.script = cmd
+ er.pkgname = pkg.name
+ }
+ process.removeListener('SIGTERM', procKill)
+ process.removeListener('SIGTERM', procInterupt)
+ process.removeListener('SIGINT', procKill)
+ process.removeListener('SIGINT', procInterupt)
+ return cb(er)
+ }
+ function procKill () {
+ proc.kill()
+ }
+ function procInterupt () {
+ proc.kill('SIGINT')
+ proc.on('exit', function () {
+ process.exit()
+ })
+ process.once('SIGINT', procKill)
+ }
+}
- // Get bytes
- var typeArray = new arrayType([value]),
- byteArray = new exports.Uint8Array(typeArray.buffer),
- bytes = [], i, byteView;
+function runHookLifecycle (pkg, stage, env, wd, opts, cb) {
+ hookStat(opts.dir, stage, function (er) {
+ if (er) return cb()
+ var cmd = path.join(opts.dir, '.hooks', stage)
+ var note = '\n> ' + pkg._id + ' ' + stage + ' ' + wd +
+ '\n> ' + cmd
+ runCmd(note, cmd, pkg, env, stage, wd, opts, cb)
+ })
+}
- for (i = 0; i < arrayType.BYTES_PER_ELEMENT; i += 1) {
- bytes.push(r(byteArray, i));
+function makeEnv (data, opts, prefix, env) {
+ prefix = prefix || 'npm_package_'
+ if (!env) {
+ env = {}
+ for (var i in process.env) {
+ if (!i.match(/^npm_/)) {
+ env[i] = process.env[i]
}
+ }
- // Flip if necessary
- if (Boolean(littleEndian) === Boolean(IS_BIG_ENDIAN)) {
- bytes.reverse();
+ // express and others respect the NODE_ENV value.
+ if (opts.production) env.NODE_ENV = 'production'
+ } else if (!data.hasOwnProperty('_lifecycleEnv')) {
+ Object.defineProperty(data, '_lifecycleEnv',
+ {
+ value: env,
+ enumerable: false
}
+ )
+ }
- // Write them
- byteView = new exports.Uint8Array(this.buffer, byteOffset, arrayType.BYTES_PER_ELEMENT);
- byteView.set(bytes);
- };
+ if (opts.nodeOptions) env.NODE_OPTIONS = opts.nodeOptions
+
+ for (i in data) {
+ if (i.charAt(0) !== '_') {
+ var envKey = (prefix + i).replace(/[^a-zA-Z0-9_]/g, '_')
+ if (i === 'readme') {
+ continue
+ }
+ if (data[i] && typeof data[i] === 'object') {
+ try {
+ // quick and dirty detection for cyclical structures
+ JSON.stringify(data[i])
+ makeEnv(data[i], opts, envKey + '_', env)
+ } catch (ex) {
+ // usually these are package objects.
+ // just get the path and basic details.
+ var d = data[i]
+ makeEnv(
+ { name: d.name, version: d.version, path: d.path },
+ opts,
+ envKey + '_',
+ env
+ )
+ }
+ } else {
+ env[envKey] = String(data[i])
+ env[envKey] = env[envKey].indexOf('\n') !== -1
+ ? JSON.stringify(env[envKey])
+ : env[envKey]
+ }
+ }
}
- DataView.prototype.setUint8 = makeSetter(exports.Uint8Array);
- DataView.prototype.setInt8 = makeSetter(exports.Int8Array);
- DataView.prototype.setUint16 = makeSetter(exports.Uint16Array);
- DataView.prototype.setInt16 = makeSetter(exports.Int16Array);
- DataView.prototype.setUint32 = makeSetter(exports.Uint32Array);
- DataView.prototype.setInt32 = makeSetter(exports.Int32Array);
- DataView.prototype.setFloat32 = makeSetter(exports.Float32Array);
- DataView.prototype.setFloat64 = makeSetter(exports.Float64Array);
+ if (prefix !== 'npm_package_') return env
- exports.DataView = exports.DataView || DataView;
+ prefix = 'npm_config_'
+ var pkgConfig = {}
+ var pkgVerConfig = {}
+ var namePref = data.name + ':'
+ var verPref = data.name + '@' + data.version + ':'
-}());
+ Object.keys(opts.config).forEach(function (i) {
+ // in some rare cases (e.g. working with nerf darts), there are segmented
+ // "private" (underscore-prefixed) config names -- don't export
+ if ((i.charAt(0) === '_' && i.indexOf('_' + namePref) !== 0) || i.match(/:_/)) {
+ return
+ }
+ var value = opts.config[i]
+ if (value instanceof Stream || Array.isArray(value) || typeof value === 'function') return
+ if (i.match(/umask/)) value = umask.toString(value)
+ if (!value) value = ''
+ else if (typeof value === 'number') value = '' + value
+ else if (typeof value !== 'string') value = JSON.stringify(value)
-/***/ }),
-/* 318 */
-/***/ (function(__unusedmodule, exports) {
+ if (typeof value !== 'string') {
+ return
+ }
-"use strict";
+ value = value.indexOf('\n') !== -1
+ ? JSON.stringify(value)
+ : value
+ i = i.replace(/^_+/, '')
+ var k
+ if (i.indexOf(namePref) === 0) {
+ k = i.substr(namePref.length).replace(/[^a-zA-Z0-9_]/g, '_')
+ pkgConfig[k] = value
+ } else if (i.indexOf(verPref) === 0) {
+ k = i.substr(verPref.length).replace(/[^a-zA-Z0-9_]/g, '_')
+ pkgVerConfig[k] = value
+ }
+ var envKey = (prefix + i).replace(/[^a-zA-Z0-9_]/g, '_')
+ env[envKey] = value
+ })
+
+ prefix = 'npm_package_config_'
+ ;[pkgConfig, pkgVerConfig].forEach(function (conf) {
+ for (var i in conf) {
+ var envKey = (prefix + i)
+ env[envKey] = conf[i]
+ }
+ })
+
+ return env
+}
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-//# sourceMappingURL=BatchObserverResult.js.map
/***/ }),
-/* 319 */,
-/* 320 */,
-/* 321 */
+/* 261 */
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-module.exports = function(
- Promise, PromiseArray, tryConvertToPromise, apiRejection) {
-var util = __webpack_require__(248);
-var isObject = util.isObject;
-var es5 = __webpack_require__(883);
-var Es6Map;
-if (typeof Map === "function") Es6Map = Map;
+var process = __webpack_require__(356)
+try {
+ module.exports = setImmediate
+} catch (ex) {
+ module.exports = process.nextTick
+}
-var mapToEntries = (function() {
- var index = 0;
- var size = 0;
- function extractEntry(value, key) {
- this[index] = value;
- this[index + size] = key;
- index++;
- }
+/***/ }),
+/* 262 */
+/***/ (function(module) {
- return function mapToEntries(map) {
- size = map.size;
- index = 0;
- var ret = new Array(map.size * 2);
- map.forEach(extractEntry, ret);
- return ret;
- };
-})();
+var toString = {}.toString;
-var entriesToMap = function(entries) {
- var ret = new Es6Map();
- var length = entries.length / 2 | 0;
- for (var i = 0; i < length; ++i) {
- var key = entries[length + i];
- var value = entries[i];
- ret.set(key, value);
- }
- return ret;
+module.exports = Array.isArray || function (arr) {
+ return toString.call(arr) == '[object Array]';
};
-function PropertiesPromiseArray(obj) {
- var isMap = false;
- var entries;
- if (Es6Map !== undefined && obj instanceof Es6Map) {
- entries = mapToEntries(obj);
- isMap = true;
- } else {
- var keys = es5.keys(obj);
- var len = keys.length;
- entries = new Array(len * 2);
- for (var i = 0; i < len; ++i) {
- var key = keys[i];
- entries[i] = obj[key];
- entries[i + len] = key;
- }
- }
- this.constructor$(entries);
- this._isMap = isMap;
- this._init$(undefined, isMap ? -6 : -3);
-}
-util.inherits(PropertiesPromiseArray, PromiseArray);
-PropertiesPromiseArray.prototype._init = function () {};
+/***/ }),
+/* 263 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
-PropertiesPromiseArray.prototype._promiseFulfilled = function (value, index) {
- this._values[index] = value;
- var totalResolved = ++this._totalResolved;
- if (totalResolved >= this._length) {
- var val;
- if (this._isMap) {
- val = entriesToMap(this._values);
- } else {
- val = {};
- var keyOffset = this.length();
- for (var i = 0, len = this.length(); i < len; ++i) {
- val[this._values[i + keyOffset]] = this._values[i];
- }
- }
- this._resolve(val);
- return true;
- }
- return false;
-};
+"use strict";
-PropertiesPromiseArray.prototype.shouldCopyValues = function () {
- return false;
-};
-PropertiesPromiseArray.prototype.getActualLength = function (len) {
- return len >> 1;
-};
+Object.defineProperty(exports, '__esModule', { value: true });
-function props(promises) {
- var ret;
- var castValue = tryConvertToPromise(promises);
+var api = __webpack_require__(440);
+var tslib = __webpack_require__(144);
- if (!isObject(castValue)) {
- return apiRejection("cannot await properties of a non-object\u000a\u000a See http://goo.gl/MqrFmX\u000a");
- } else if (castValue instanceof Promise) {
- ret = castValue._then(
- Promise.props, undefined, undefined, undefined, undefined);
- } else {
- ret = new PropertiesPromiseArray(castValue).promise();
+// Copyright (c) Microsoft Corporation.
+/**
+ * A no-op implementation of Span that can safely be used without side-effects.
+ */
+var NoOpSpan = /** @class */ (function () {
+ function NoOpSpan() {
}
+ /**
+ * Returns the SpanContext associated with this Span.
+ */
+ NoOpSpan.prototype.context = function () {
+ return {
+ spanId: "",
+ traceId: "",
+ traceFlags: api.TraceFlags.NONE
+ };
+ };
+ /**
+ * Marks the end of Span execution.
+ * @param _endTime The time to use as the Span's end time. Defaults to
+ * the current time.
+ */
+ NoOpSpan.prototype.end = function (_endTime) {
+ /* Noop */
+ };
+ /**
+ * Sets an attribute on the Span
+ * @param _key the attribute key
+ * @param _value the attribute value
+ */
+ NoOpSpan.prototype.setAttribute = function (_key, _value) {
+ return this;
+ };
+ /**
+ * Sets attributes on the Span
+ * @param _attributes the attributes to add
+ */
+ NoOpSpan.prototype.setAttributes = function (_attributes) {
+ return this;
+ };
+ /**
+ * Adds an event to the Span
+ * @param _name The name of the event
+ * @param _attributes The associated attributes to add for this event
+ */
+ NoOpSpan.prototype.addEvent = function (_name, _attributes) {
+ return this;
+ };
+ /**
+ * Sets a status on the span. Overrides the default of CanonicalCode.OK.
+ * @param _status The status to set.
+ */
+ NoOpSpan.prototype.setStatus = function (_status) {
+ return this;
+ };
+ /**
+ * Updates the name of the Span
+ * @param _name the new Span name
+ */
+ NoOpSpan.prototype.updateName = function (_name) {
+ return this;
+ };
+ /**
+ * Returns whether this span will be recorded
+ */
+ NoOpSpan.prototype.isRecording = function () {
+ return false;
+ };
+ return NoOpSpan;
+}());
- if (castValue instanceof Promise) {
- ret._propagateFrom(castValue, 2);
+// Copyright (c) Microsoft Corporation.
+/**
+ * A no-op implementation of Tracer that can be used when tracing
+ * is disabled.
+ */
+var NoOpTracer = /** @class */ (function () {
+ function NoOpTracer() {
}
- return ret;
-}
-
-Promise.prototype.props = function () {
- return props(this);
-};
-
-Promise.props = function (promises) {
- return props(promises);
-};
-};
-
-
-/***/ }),
-/* 322 */,
-/* 323 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-module.exports =
-function(Promise, PromiseArray, apiRejection) {
-var util = __webpack_require__(248);
-var RangeError = __webpack_require__(351).RangeError;
-var AggregateError = __webpack_require__(351).AggregateError;
-var isArray = util.isArray;
-var CANCELLATION = {};
-
+ /**
+ * Starts a new Span.
+ * @param _name The name of the span.
+ * @param _options The SpanOptions used during Span creation.
+ */
+ NoOpTracer.prototype.startSpan = function (_name, _options) {
+ return new NoOpSpan();
+ };
+ /**
+ * Returns the current Span from the current context, if available.
+ */
+ NoOpTracer.prototype.getCurrentSpan = function () {
+ return new NoOpSpan();
+ };
+ /**
+ * Executes the given function within the context provided by a Span.
+ * @param _span The span that provides the context.
+ * @param fn The function to be executed.
+ */
+ NoOpTracer.prototype.withSpan = function (_span, fn) {
+ return fn();
+ };
+ /**
+ * Bind a Span as the target's scope
+ * @param target An object to bind the scope.
+ * @param _span A specific Span to use. Otherwise, use the current one.
+ */
+ NoOpTracer.prototype.bind = function (target, _span) {
+ return target;
+ };
+ return NoOpTracer;
+}());
-function SomePromiseArray(values) {
- this.constructor$(values);
- this._howMany = 0;
- this._unwrap = false;
- this._initialized = false;
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+function getGlobalObject() {
+ return global;
}
-util.inherits(SomePromiseArray, PromiseArray);
-SomePromiseArray.prototype._init = function () {
- if (!this._initialized) {
- return;
+// Copyright (c) Microsoft Corporation.
+// V1 = OpenTelemetry 0.1
+// V2 = OpenTelemetry 0.2
+// V3 = OpenTelemetry 0.6.1
+var GLOBAL_TRACER_VERSION = 3;
+// preview5 shipped with @azure/core-tracing.tracerCache
+// and didn't have smart detection for collisions
+var GLOBAL_TRACER_SYMBOL = Symbol.for("@azure/core-tracing.tracerCache2");
+var cache;
+function loadTracerCache() {
+ var globalObj = getGlobalObject();
+ var existingCache = globalObj[GLOBAL_TRACER_SYMBOL];
+ var setGlobalCache = true;
+ if (existingCache) {
+ if (existingCache.version === GLOBAL_TRACER_VERSION) {
+ cache = existingCache;
+ }
+ else {
+ setGlobalCache = false;
+ if (existingCache.tracer) {
+ throw new Error("Two incompatible versions of @azure/core-tracing have been loaded.\n This library is " + GLOBAL_TRACER_VERSION + ", existing is " + existingCache.version + ".");
+ }
+ }
}
- if (this._howMany === 0) {
- this._resolve([]);
- return;
+ if (!cache) {
+ cache = {
+ tracer: undefined,
+ version: GLOBAL_TRACER_VERSION
+ };
}
- this._init$(undefined, -5);
- var isArrayResolved = isArray(this._values);
- if (!this._isResolved() &&
- isArrayResolved &&
- this._howMany > this._canPossiblyFulfill()) {
- this._reject(this._getRangeError(this.length()));
+ if (setGlobalCache) {
+ globalObj[GLOBAL_TRACER_SYMBOL] = cache;
}
-};
-
-SomePromiseArray.prototype.init = function () {
- this._initialized = true;
- this._init();
-};
-
-SomePromiseArray.prototype.setUnwrap = function () {
- this._unwrap = true;
-};
-
-SomePromiseArray.prototype.howMany = function () {
- return this._howMany;
-};
-
-SomePromiseArray.prototype.setHowMany = function (count) {
- this._howMany = count;
-};
-
-SomePromiseArray.prototype._promiseFulfilled = function (value) {
- this._addFulfilled(value);
- if (this._fulfilled() === this.howMany()) {
- this._values.length = this.howMany();
- if (this.howMany() === 1 && this._unwrap) {
- this._resolve(this._values[0]);
- } else {
- this._resolve(this._values);
- }
- return true;
+}
+function getCache() {
+ if (!cache) {
+ loadTracerCache();
}
- return false;
+ return cache;
+}
-};
-SomePromiseArray.prototype._promiseRejected = function (reason) {
- this._addRejected(reason);
- return this._checkOutcome();
-};
+// Copyright (c) Microsoft Corporation.
+var defaultTracer;
+function getDefaultTracer() {
+ if (!defaultTracer) {
+ defaultTracer = new NoOpTracer();
+ }
+ return defaultTracer;
+}
+/**
+ * Sets the global tracer, enabling tracing for the Azure SDK.
+ * @param tracer An OpenTelemetry Tracer instance.
+ */
+function setTracer(tracer) {
+ var cache = getCache();
+ cache.tracer = tracer;
+}
+/**
+ * Retrieves the active tracer, or returns a
+ * no-op implementation if one is not set.
+ */
+function getTracer() {
+ var cache = getCache();
+ if (!cache.tracer) {
+ return getDefaultTracer();
+ }
+ return cache.tracer;
+}
-SomePromiseArray.prototype._promiseCancelled = function () {
- if (this._values instanceof Promise || this._values == null) {
- return this._cancel();
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+/**
+ * @ignore
+ * @internal
+ */
+var OpenCensusTraceStateWrapper = /** @class */ (function () {
+ function OpenCensusTraceStateWrapper(state) {
+ this._state = state;
}
- this._addRejected(CANCELLATION);
- return this._checkOutcome();
-};
+ OpenCensusTraceStateWrapper.prototype.get = function (_key) {
+ throw new Error("Method not implemented.");
+ };
+ OpenCensusTraceStateWrapper.prototype.set = function (_key, _value) {
+ throw new Error("Method not implemented.");
+ };
+ OpenCensusTraceStateWrapper.prototype.unset = function (_key) {
+ throw new Error("Method not implemented");
+ };
+ OpenCensusTraceStateWrapper.prototype.serialize = function () {
+ return this._state || "";
+ };
+ return OpenCensusTraceStateWrapper;
+}());
-SomePromiseArray.prototype._checkOutcome = function() {
- if (this.howMany() > this._canPossiblyFulfill()) {
- var e = new AggregateError();
- for (var i = this.length(); i < this._values.length; ++i) {
- if (this._values[i] !== CANCELLATION) {
- e.push(this._values[i]);
+// Copyright (c) Microsoft Corporation.
+function isWrappedSpan(span) {
+ return !!span && span.getWrappedSpan !== undefined;
+}
+function isTracer(tracerOrSpan) {
+ return tracerOrSpan.getWrappedTracer !== undefined;
+}
+/**
+ * An implementation of OpenTelemetry Span that wraps an OpenCensus Span.
+ */
+var OpenCensusSpanWrapper = /** @class */ (function () {
+ function OpenCensusSpanWrapper(tracerOrSpan, name, options) {
+ if (name === void 0) { name = ""; }
+ if (options === void 0) { options = {}; }
+ if (isTracer(tracerOrSpan)) {
+ var parent = isWrappedSpan(options.parent) ? options.parent.getWrappedSpan() : undefined;
+ this._span = tracerOrSpan.getWrappedTracer().startChildSpan({
+ name: name,
+ childOf: parent
+ });
+ this._span.start();
+ if (options.links) {
+ for (var _i = 0, _a = options.links; _i < _a.length; _i++) {
+ var link = _a[_i];
+ // Since there is no way to set the link relationship, leave it as Unspecified.
+ this._span.addLink(link.context.traceId, link.context.spanId, 0 /* LinkType.UNSPECIFIED */, link.attributes);
+ }
}
}
- if (e.length > 0) {
- this._reject(e);
- } else {
- this._cancel();
+ else {
+ this._span = tracerOrSpan;
}
- return true;
}
- return false;
-};
-
-SomePromiseArray.prototype._fulfilled = function () {
- return this._totalResolved;
-};
-
-SomePromiseArray.prototype._rejected = function () {
- return this._values.length - this.length();
-};
-
-SomePromiseArray.prototype._addRejected = function (reason) {
- this._values.push(reason);
-};
-
-SomePromiseArray.prototype._addFulfilled = function (value) {
- this._values[this._totalResolved++] = value;
-};
-
-SomePromiseArray.prototype._canPossiblyFulfill = function () {
- return this.length() - this._rejected();
-};
-
-SomePromiseArray.prototype._getRangeError = function (count) {
- var message = "Input array must contain at least " +
- this._howMany + " items but contains only " + count + " items";
- return new RangeError(message);
-};
-
-SomePromiseArray.prototype._resolveEmptyArray = function () {
- this._reject(this._getRangeError(0));
-};
+ /**
+ * The underlying OpenCensus Span
+ */
+ OpenCensusSpanWrapper.prototype.getWrappedSpan = function () {
+ return this._span;
+ };
+ /**
+ * Marks the end of Span execution.
+ * @param endTime The time to use as the Span's end time. Defaults to
+ * the current time.
+ */
+ OpenCensusSpanWrapper.prototype.end = function (_endTime) {
+ this._span.end();
+ };
+ /**
+ * Returns the SpanContext associated with this Span.
+ */
+ OpenCensusSpanWrapper.prototype.context = function () {
+ var openCensusSpanContext = this._span.spanContext;
+ return {
+ spanId: openCensusSpanContext.spanId,
+ traceId: openCensusSpanContext.traceId,
+ traceFlags: openCensusSpanContext.options,
+ traceState: new OpenCensusTraceStateWrapper(openCensusSpanContext.traceState)
+ };
+ };
+ /**
+ * Sets an attribute on the Span
+ * @param key the attribute key
+ * @param value the attribute value
+ */
+ OpenCensusSpanWrapper.prototype.setAttribute = function (key, value) {
+ this._span.addAttribute(key, value);
+ return this;
+ };
+ /**
+ * Sets attributes on the Span
+ * @param attributes the attributes to add
+ */
+ OpenCensusSpanWrapper.prototype.setAttributes = function (attributes) {
+ this._span.attributes = attributes;
+ return this;
+ };
+ /**
+ * Adds an event to the Span
+ * @param name The name of the event
+ * @param attributes The associated attributes to add for this event
+ */
+ OpenCensusSpanWrapper.prototype.addEvent = function (_name, _attributes) {
+ throw new Error("Method not implemented.");
+ };
+ /**
+ * Sets a status on the span. Overrides the default of CanonicalCode.OK.
+ * @param status The status to set.
+ */
+ OpenCensusSpanWrapper.prototype.setStatus = function (status) {
+ this._span.setStatus(status.code, status.message);
+ return this;
+ };
+ /**
+ * Updates the name of the Span
+ * @param name the new Span name
+ */
+ OpenCensusSpanWrapper.prototype.updateName = function (name) {
+ this._span.name = name;
+ return this;
+ };
+ /**
+ * Returns whether this span will be recorded
+ */
+ OpenCensusSpanWrapper.prototype.isRecording = function () {
+ // NoRecordSpans have an empty traceId
+ return !!this._span.traceId;
+ };
+ return OpenCensusSpanWrapper;
+}());
-function some(promises, howMany) {
- if ((howMany | 0) !== howMany || howMany < 0) {
- return apiRejection("expecting a positive integer\u000a\u000a See http://goo.gl/MqrFmX\u000a");
+// Copyright (c) Microsoft Corporation.
+/**
+ * An implementation of OpenTelemetry Tracer that wraps an OpenCensus Tracer.
+ */
+var OpenCensusTracerWrapper = /** @class */ (function () {
+ /**
+ * Create a new wrapper around a given OpenCensus Tracer.
+ * @param tracer The OpenCensus Tracer to wrap.
+ */
+ function OpenCensusTracerWrapper(tracer) {
+ this._tracer = tracer;
}
- var ret = new SomePromiseArray(promises);
- var promise = ret.promise();
- ret.setHowMany(howMany);
- ret.init();
- return promise;
-}
-
-Promise.some = function (promises, howMany) {
- return some(promises, howMany);
-};
-
-Promise.prototype.some = function (howMany) {
- return some(this, howMany);
-};
-
-Promise._SomePromiseArray = SomePromiseArray;
-};
-
-
-/***/ }),
-/* 324 */,
-/* 325 */
+ /**
+ * The wrapped OpenCensus Tracer
+ */
+ OpenCensusTracerWrapper.prototype.getWrappedTracer = function () {
+ return this._tracer;
+ };
+ /**
+ * Starts a new Span.
+ * @param name The name of the span.
+ * @param options The SpanOptions used during Span creation.
+ */
+ OpenCensusTracerWrapper.prototype.startSpan = function (name, options) {
+ return new OpenCensusSpanWrapper(this, name, options);
+ };
+ /**
+ * Returns the current Span from the current context, if available.
+ */
+ OpenCensusTracerWrapper.prototype.getCurrentSpan = function () {
+ return undefined;
+ };
+ /**
+ * Executes the given function within the context provided by a Span.
+ * @param _span The span that provides the context.
+ * @param _fn The function to be executed.
+ */
+ OpenCensusTracerWrapper.prototype.withSpan = function (_span, _fn) {
+ throw new Error("Method not implemented.");
+ };
+ /**
+ * Bind a Span as the target's scope
+ * @param target An object to bind the scope.
+ * @param _span A specific Span to use. Otherwise, use the current one.
+ */
+ OpenCensusTracerWrapper.prototype.bind = function (_target, _span) {
+ throw new Error("Method not implemented.");
+ };
+ return OpenCensusTracerWrapper;
+}());
+
+// Copyright (c) Microsoft Corporation.
+/**
+ * A mock span useful for testing.
+ */
+var TestSpan = /** @class */ (function (_super) {
+ tslib.__extends(TestSpan, _super);
+ /**
+ * Starts a new Span.
+ * @param parentTracer The tracer that created this Span
+ * @param name The name of the span.
+ * @param context The SpanContext this span belongs to
+ * @param kind The SpanKind of this Span
+ * @param parentSpanId The identifier of the parent Span
+ * @param startTime The startTime of the event (defaults to now)
+ */
+ function TestSpan(parentTracer, name, context, kind, parentSpanId, startTime) {
+ if (startTime === void 0) { startTime = Date.now(); }
+ var _this = _super.call(this) || this;
+ _this._tracer = parentTracer;
+ _this.name = name;
+ _this.kind = kind;
+ _this.startTime = startTime;
+ _this.parentSpanId = parentSpanId;
+ _this.status = {
+ code: api.CanonicalCode.OK
+ };
+ _this.endCalled = false;
+ _this._context = context;
+ _this.attributes = {};
+ return _this;
+ }
+ /**
+ * Returns the Tracer that created this Span
+ */
+ TestSpan.prototype.tracer = function () {
+ return this._tracer;
+ };
+ /**
+ * Returns the SpanContext associated with this Span.
+ */
+ TestSpan.prototype.context = function () {
+ return this._context;
+ };
+ /**
+ * Marks the end of Span execution.
+ * @param _endTime The time to use as the Span's end time. Defaults to
+ * the current time.
+ */
+ TestSpan.prototype.end = function (_endTime) {
+ this.endCalled = true;
+ };
+ /**
+ * Sets a status on the span. Overrides the default of CanonicalCode.OK.
+ * @param status The status to set.
+ */
+ TestSpan.prototype.setStatus = function (status) {
+ this.status = status;
+ return this;
+ };
+ /**
+ * Returns whether this span will be recorded
+ */
+ TestSpan.prototype.isRecording = function () {
+ return true;
+ };
+ /**
+ * Sets an attribute on the Span
+ * @param key the attribute key
+ * @param value the attribute value
+ */
+ TestSpan.prototype.setAttribute = function (key, value) {
+ this.attributes[key] = value;
+ return this;
+ };
+ /**
+ * Sets attributes on the Span
+ * @param attributes the attributes to add
+ */
+ TestSpan.prototype.setAttributes = function (attributes) {
+ for (var _i = 0, _a = Object.keys(attributes); _i < _a.length; _i++) {
+ var key = _a[_i];
+ this.attributes[key] = attributes[key];
+ }
+ return this;
+ };
+ return TestSpan;
+}(NoOpSpan));
+
+// Copyright (c) Microsoft Corporation.
+/**
+ * A mock tracer useful for testing
+ */
+var TestTracer = /** @class */ (function (_super) {
+ tslib.__extends(TestTracer, _super);
+ function TestTracer() {
+ var _this = _super !== null && _super.apply(this, arguments) || this;
+ _this.traceIdCounter = 0;
+ _this.spanIdCounter = 0;
+ _this.rootSpans = [];
+ _this.knownSpans = [];
+ return _this;
+ }
+ TestTracer.prototype.getNextTraceId = function () {
+ this.traceIdCounter++;
+ return String(this.traceIdCounter);
+ };
+ TestTracer.prototype.getNextSpanId = function () {
+ this.spanIdCounter++;
+ return String(this.spanIdCounter);
+ };
+ /**
+ * Returns all Spans that were created without a parent
+ */
+ TestTracer.prototype.getRootSpans = function () {
+ return this.rootSpans;
+ };
+ /**
+ * Returns all Spans this Tracer knows about
+ */
+ TestTracer.prototype.getKnownSpans = function () {
+ return this.knownSpans;
+ };
+ /**
+ * Returns all Spans where end() has not been called
+ */
+ TestTracer.prototype.getActiveSpans = function () {
+ return this.knownSpans.filter(function (span) {
+ return !span.endCalled;
+ });
+ };
+ /**
+ * Return all Spans for a particular trace, grouped by their
+ * parent Span in a tree-like structure
+ * @param traceId The traceId to return the graph for
+ */
+ TestTracer.prototype.getSpanGraph = function (traceId) {
+ var traceSpans = this.knownSpans.filter(function (span) {
+ return span.context().traceId === traceId;
+ });
+ var roots = [];
+ var nodeMap = new Map();
+ for (var _i = 0, traceSpans_1 = traceSpans; _i < traceSpans_1.length; _i++) {
+ var span = traceSpans_1[_i];
+ var spanId = span.context().spanId;
+ var node = {
+ name: span.name,
+ children: []
+ };
+ nodeMap.set(spanId, node);
+ if (span.parentSpanId) {
+ var parent = nodeMap.get(span.parentSpanId);
+ if (!parent) {
+ throw new Error("Span with name " + node.name + " has an unknown parentSpan with id " + span.parentSpanId);
+ }
+ parent.children.push(node);
+ }
+ else {
+ roots.push(node);
+ }
+ }
+ return {
+ roots: roots
+ };
+ };
+ /**
+ * Starts a new Span.
+ * @param name The name of the span.
+ * @param options The SpanOptions used during Span creation.
+ */
+ TestTracer.prototype.startSpan = function (name, options) {
+ if (options === void 0) { options = {}; }
+ var parentContext = this._getParentContext(options);
+ var traceId;
+ var isRootSpan = false;
+ if (parentContext && parentContext.traceId) {
+ traceId = parentContext.traceId;
+ }
+ else {
+ traceId = this.getNextTraceId();
+ isRootSpan = true;
+ }
+ var context = {
+ traceId: traceId,
+ spanId: this.getNextSpanId(),
+ traceFlags: api.TraceFlags.NONE
+ };
+ var span = new TestSpan(this, name, context, options.kind || api.SpanKind.INTERNAL, parentContext ? parentContext.spanId : undefined, options.startTime);
+ this.knownSpans.push(span);
+ if (isRootSpan) {
+ this.rootSpans.push(span);
+ }
+ return span;
+ };
+ TestTracer.prototype._getParentContext = function (options) {
+ var parent = options.parent;
+ var result;
+ if (parent) {
+ if ("traceId" in parent) {
+ result = parent;
+ }
+ else {
+ result = parent.context();
+ }
+ }
+ return result;
+ };
+ return TestTracer;
+}(NoOpTracer));
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+var VERSION = "00";
+/**
+ * Generates a `SpanContext` given a `traceparent` header value.
+ * @param traceParent Serialized span context data as a `traceparent` header value.
+ * @returns The `SpanContext` generated from the `traceparent` value.
+ */
+function extractSpanContextFromTraceParentHeader(traceParentHeader) {
+ var parts = traceParentHeader.split("-");
+ if (parts.length !== 4) {
+ return;
+ }
+ var version = parts[0], traceId = parts[1], spanId = parts[2], traceOptions = parts[3];
+ if (version !== VERSION) {
+ return;
+ }
+ var traceFlags = parseInt(traceOptions, 16);
+ var spanContext = {
+ spanId: spanId,
+ traceId: traceId,
+ traceFlags: traceFlags
+ };
+ return spanContext;
+}
+/**
+ * Generates a `traceparent` value given a span context.
+ * @param spanContext Contains context for a specific span.
+ * @returns The `spanContext` represented as a `traceparent` value.
+ */
+function getTraceParentHeader(spanContext) {
+ var missingFields = [];
+ if (!spanContext.traceId) {
+ missingFields.push("traceId");
+ }
+ if (!spanContext.spanId) {
+ missingFields.push("spanId");
+ }
+ if (missingFields.length) {
+ return;
+ }
+ var flags = spanContext.traceFlags || 0 /* NONE */;
+ var hexFlags = flags.toString(16);
+ var traceFlags = hexFlags.length === 1 ? "0" + hexFlags : hexFlags;
+ // https://www.w3.org/TR/trace-context/#traceparent-header-field-values
+ return VERSION + "-" + spanContext.traceId + "-" + spanContext.spanId + "-" + traceFlags;
+}
+
+exports.NoOpSpan = NoOpSpan;
+exports.NoOpTracer = NoOpTracer;
+exports.OpenCensusSpanWrapper = OpenCensusSpanWrapper;
+exports.OpenCensusTracerWrapper = OpenCensusTracerWrapper;
+exports.TestSpan = TestSpan;
+exports.TestTracer = TestTracer;
+exports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader;
+exports.getTraceParentHeader = getTraceParentHeader;
+exports.getTracer = getTracer;
+exports.setTracer = setTracer;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+/* 264 */
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const core = __importStar(__webpack_require__(470));
-const run_1 = __webpack_require__(180);
-run_1.run().catch(core.setFailed);
+exports.TrackerGroup = __webpack_require__(398)
+exports.Tracker = __webpack_require__(623)
+exports.TrackerStream = __webpack_require__(235)
/***/ }),
-/* 326 */
-/***/ (function(__unusedmodule, exports) {
+/* 265 */
+/***/ (function(module) {
-"use strict";
+// Generated by CoffeeScript 1.12.7
+(function() {
+ var XMLNodeList;
+
+ module.exports = XMLNodeList = (function() {
+ function XMLNodeList(nodes) {
+ this.nodes = nodes;
+ }
+
+ Object.defineProperty(XMLNodeList.prototype, 'length', {
+ get: function() {
+ return this.nodes.length || 0;
+ }
+ });
+
+ XMLNodeList.prototype.clone = function() {
+ return this.nodes = null;
+ };
+
+ XMLNodeList.prototype.item = function(index) {
+ return this.nodes[index] || null;
+ };
+
+ return XMLNodeList;
+
+ })();
+
+}).call(this);
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-//# sourceMappingURL=ObserverResult.js.map
/***/ }),
-/* 327 */
-/***/ (function(__unusedmodule, exports) {
+/* 266 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-/**
- * Indicates whether a pattern matches a path
- */
-var MatchKind;
-(function (MatchKind) {
- /** Not matched */
- MatchKind[MatchKind["None"] = 0] = "None";
- /** Matched if the path is a directory */
- MatchKind[MatchKind["Directory"] = 1] = "Directory";
- /** Matched if the path is a regular file */
- MatchKind[MatchKind["File"] = 2] = "File";
- /** Matched */
- MatchKind[MatchKind["All"] = 3] = "All";
-})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));
-//# sourceMappingURL=internal-match-kind.js.map
+var os = __webpack_require__(87);
+
+function homedir() {
+ var env = process.env;
+ var home = env.HOME;
+ var user = env.LOGNAME || env.USER || env.LNAME || env.USERNAME;
+
+ if (process.platform === 'win32') {
+ return env.USERPROFILE || env.HOMEDRIVE + env.HOMEPATH || home || null;
+ }
+
+ if (process.platform === 'darwin') {
+ return home || (user ? '/Users/' + user : null);
+ }
+
+ if (process.platform === 'linux') {
+ return home || (process.getuid() === 0 ? '/root' : (user ? '/home/' + user : null));
+ }
+
+ return home || null;
+}
+
+module.exports = typeof os.homedir === 'function' ? os.homedir : homedir;
+
/***/ }),
-/* 328 */,
-/* 329 */,
-/* 330 */,
-/* 331 */,
-/* 332 */
+/* 267 */,
+/* 268 */
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
-/*!
- * Copyright (c) 2015, Salesforce.com, Inc.
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- *
- * 3. Neither the name of Salesforce.com nor the names of its contributors may
- * be used to endorse or promote products derived from this software without
- * specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
- * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
- * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
- * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGE.
- */
-const { fromCallback } = __webpack_require__(147);
-const Store = __webpack_require__(338).Store;
-const permuteDomain = __webpack_require__(89).permuteDomain;
-const pathMatch = __webpack_require__(348).pathMatch;
-const util = __webpack_require__(669);
-class MemoryCookieStore extends Store {
- constructor() {
- super();
- this.synchronous = true;
- this.idx = {};
- if (util.inspect.custom) {
- this[util.inspect.custom] = this.inspect;
- }
- }
+const assert = __webpack_require__(357)
+const Buffer = __webpack_require__(293).Buffer
+const realZlib = __webpack_require__(761)
- inspect() {
- return `{ idx: ${util.inspect(this.idx, false, 2)} }`;
+const constants = exports.constants = __webpack_require__(60)
+const Minipass = __webpack_require__(720)
+
+const OriginalBufferConcat = Buffer.concat
+
+class ZlibError extends Error {
+ constructor (err) {
+ super('zlib: ' + err.message)
+ this.code = err.code
+ this.errno = err.errno
+ /* istanbul ignore if */
+ if (!this.code)
+ this.code = 'ZLIB_ERROR'
+
+ this.message = 'zlib: ' + err.message
+ Error.captureStackTrace(this, this.constructor)
}
- findCookie(domain, path, key, cb) {
- if (!this.idx[domain]) {
- return cb(null, undefined);
- }
- if (!this.idx[domain][path]) {
- return cb(null, undefined);
- }
- return cb(null, this.idx[domain][path][key] || null);
+ get name () {
+ return 'ZlibError'
}
- findCookies(domain, path, allowSpecialUseDomain, cb) {
- const results = [];
- if (typeof allowSpecialUseDomain === "function") {
- cb = allowSpecialUseDomain;
- allowSpecialUseDomain = false;
- }
- if (!domain) {
- return cb(null, []);
- }
+}
- let pathMatcher;
- if (!path) {
- // null means "all paths"
- pathMatcher = function matchAll(domainIndex) {
- for (const curPath in domainIndex) {
- const pathIndex = domainIndex[curPath];
- for (const key in pathIndex) {
- results.push(pathIndex[key]);
- }
- }
- };
- } else {
- pathMatcher = function matchRFC(domainIndex) {
- //NOTE: we should use path-match algorithm from S5.1.4 here
- //(see : https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/canonical_cookie.cc#L299)
- Object.keys(domainIndex).forEach(cookiePath => {
- if (pathMatch(path, cookiePath)) {
- const pathIndex = domainIndex[cookiePath];
- for (const key in pathIndex) {
- results.push(pathIndex[key]);
- }
- }
- });
- };
- }
+// the Zlib class they all inherit from
+// This thing manages the queue of requests, and returns
+// true or false if there is anything in the queue when
+// you call the .write() method.
+const _opts = Symbol('opts')
+const _flushFlag = Symbol('flushFlag')
+const _finishFlushFlag = Symbol('finishFlushFlag')
+const _fullFlushFlag = Symbol('fullFlushFlag')
+const _handle = Symbol('handle')
+const _onError = Symbol('onError')
+const _sawError = Symbol('sawError')
+const _level = Symbol('level')
+const _strategy = Symbol('strategy')
+const _ended = Symbol('ended')
+const _defaultFullFlush = Symbol('_defaultFullFlush')
- const domains = permuteDomain(domain, allowSpecialUseDomain) || [domain];
- const idx = this.idx;
- domains.forEach(curDomain => {
- const domainIndex = idx[curDomain];
- if (!domainIndex) {
- return;
- }
- pathMatcher(domainIndex);
- });
+class ZlibBase extends Minipass {
+ constructor (opts, mode) {
+ if (!opts || typeof opts !== 'object')
+ throw new TypeError('invalid options for ZlibBase constructor')
- cb(null, results);
- }
+ super(opts)
+ this[_ended] = false
+ this[_opts] = opts
- putCookie(cookie, cb) {
- if (!this.idx[cookie.domain]) {
- this.idx[cookie.domain] = {};
+ this[_flushFlag] = opts.flush
+ this[_finishFlushFlag] = opts.finishFlush
+ // this will throw if any options are invalid for the class selected
+ try {
+ this[_handle] = new realZlib[mode](opts)
+ } catch (er) {
+ // make sure that all errors get decorated properly
+ throw new ZlibError(er)
}
- if (!this.idx[cookie.domain][cookie.path]) {
- this.idx[cookie.domain][cookie.path] = {};
+
+ this[_onError] = (err) => {
+ this[_sawError] = true
+ // there is no way to cleanly recover.
+ // continuing only obscures problems.
+ this.close()
+ this.emit('error', err)
}
- this.idx[cookie.domain][cookie.path][cookie.key] = cookie;
- cb(null);
- }
- updateCookie(oldCookie, newCookie, cb) {
- // updateCookie() may avoid updating cookies that are identical. For example,
- // lastAccessed may not be important to some stores and an equality
- // comparison could exclude that field.
- this.putCookie(newCookie, cb);
+
+ this[_handle].on('error', er => this[_onError](new ZlibError(er)))
+ this.once('end', () => this.close)
}
- removeCookie(domain, path, key, cb) {
- if (
- this.idx[domain] &&
- this.idx[domain][path] &&
- this.idx[domain][path][key]
- ) {
- delete this.idx[domain][path][key];
+
+ close () {
+ if (this[_handle]) {
+ this[_handle].close()
+ this[_handle] = null
+ this.emit('close')
}
- cb(null);
}
- removeCookies(domain, path, cb) {
- if (this.idx[domain]) {
- if (path) {
- delete this.idx[domain][path];
- } else {
- delete this.idx[domain];
- }
+
+ reset () {
+ if (!this[_sawError]) {
+ assert(this[_handle], 'zlib binding closed')
+ return this[_handle].reset()
}
- return cb(null);
- }
- removeAllCookies(cb) {
- this.idx = {};
- return cb(null);
}
- getAllCookies(cb) {
- const cookies = [];
- const idx = this.idx;
-
- const domains = Object.keys(idx);
- domains.forEach(domain => {
- const paths = Object.keys(idx[domain]);
- paths.forEach(path => {
- const keys = Object.keys(idx[domain][path]);
- keys.forEach(key => {
- if (key !== null) {
- cookies.push(idx[domain][path][key]);
- }
- });
- });
- });
- // Sort by creationIndex so deserializing retains the creation order.
- // When implementing your own store, this SHOULD retain the order too
- cookies.sort((a, b) => {
- return (a.creationIndex || 0) - (b.creationIndex || 0);
- });
+ flush (flushFlag) {
+ if (this.ended)
+ return
- cb(null, cookies);
+ if (typeof flushFlag !== 'number')
+ flushFlag = this[_fullFlushFlag]
+ this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }))
}
-}
-
-[
- "findCookie",
- "findCookies",
- "putCookie",
- "updateCookie",
- "removeCookie",
- "removeCookies",
- "removeAllCookies",
- "getAllCookies"
-].forEach(name => {
- MemoryCookieStore[name] = fromCallback(MemoryCookieStore.prototype[name]);
-});
-
-exports.MemoryCookieStore = MemoryCookieStore;
+ end (chunk, encoding, cb) {
+ if (chunk)
+ this.write(chunk, encoding)
+ this.flush(this[_finishFlushFlag])
+ this[_ended] = true
+ return super.end(null, null, cb)
+ }
-/***/ }),
-/* 333 */,
-/* 334 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ get ended () {
+ return this[_ended]
+ }
-module.exports =
-{
- parallel : __webpack_require__(424),
- serial : __webpack_require__(91),
- serialOrdered : __webpack_require__(892)
-};
+ write (chunk, encoding, cb) {
+ // process the chunk using the sync process
+ // then super.write() all the outputted chunks
+ if (typeof encoding === 'function')
+ cb = encoding, encoding = 'utf8'
+ if (typeof chunk === 'string')
+ chunk = Buffer.from(chunk, encoding)
-/***/ }),
-/* 335 */,
-/* 336 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ if (this[_sawError])
+ return
+ assert(this[_handle], 'zlib binding closed')
-"use strict";
+ // _processChunk tries to .close() the native handle after it's done, so we
+ // intercept that by temporarily making it a no-op.
+ const nativeHandle = this[_handle]._handle
+ const originalNativeClose = nativeHandle.close
+ nativeHandle.close = () => {}
+ const originalClose = this[_handle].close
+ this[_handle].close = () => {}
+ // It also calls `Buffer.concat()` at the end, which may be convenient
+ // for some, but which we are not interested in as it slows us down.
+ Buffer.concat = (args) => args
+ let result
+ try {
+ const flushFlag = typeof chunk[_flushFlag] === 'number'
+ ? chunk[_flushFlag] : this[_flushFlag]
+ result = this[_handle]._processChunk(chunk, flushFlag)
+ // if we don't throw, reset it back how it was
+ Buffer.concat = OriginalBufferConcat
+ } catch (err) {
+ // or if we do, put Buffer.concat() back before we emit error
+ // Error events call into user code, which may call Buffer.concat()
+ Buffer.concat = OriginalBufferConcat
+ this[_onError](new ZlibError(err))
+ } finally {
+ if (this[_handle]) {
+ // Core zlib resets `_handle` to null after attempting to close the
+ // native handle. Our no-op handler prevented actual closure, but we
+ // need to restore the `._handle` property.
+ this[_handle]._handle = nativeHandle
+ nativeHandle.close = originalNativeClose
+ this[_handle].close = originalClose
+ // `_processChunk()` adds an 'error' listener. If we don't remove it
+ // after each call, these handlers start piling up.
+ this[_handle].removeAllListeners('error')
+ }
+ }
-var MurmurHash3 = __webpack_require__(188)
+ let writeReturn
+ if (result) {
+ if (Array.isArray(result) && result.length > 0) {
+ // The first buffer is always `handle._outBuffer`, which would be
+ // re-used for later invocations; so, we always have to copy that one.
+ writeReturn = super.write(Buffer.from(result[0]))
+ for (let i = 1; i < result.length; i++) {
+ writeReturn = super.write(result[i])
+ }
+ } else {
+ writeReturn = super.write(Buffer.from(result))
+ }
+ }
-module.exports = function (uniq) {
- if (uniq) {
- var hash = new MurmurHash3(uniq)
- return ('00000000' + hash.result().toString(16)).substr(-8)
- } else {
- return (Math.random().toString(16) + '0000000').substr(2, 8)
+ if (cb)
+ cb()
+ return writeReturn
}
}
+class Zlib extends ZlibBase {
+ constructor (opts, mode) {
+ opts = opts || {}
-/***/ }),
-/* 337 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-/*!
- * humanize-ms - index.js
- * Copyright(c) 2014 dead_horse
- * MIT Licensed
- */
+ opts.flush = opts.flush || constants.Z_NO_FLUSH
+ opts.finishFlush = opts.finishFlush || constants.Z_FINISH
+ super(opts, mode)
+ this[_fullFlushFlag] = constants.Z_FULL_FLUSH
+ this[_level] = opts.level
+ this[_strategy] = opts.strategy
+ }
+ params (level, strategy) {
+ if (this[_sawError])
+ return
-/**
- * Module dependencies.
- */
+ if (!this[_handle])
+ throw new Error('cannot switch params when binding is closed')
-var util = __webpack_require__(669);
-var ms = __webpack_require__(527);
+ // no way to test this without also not supporting params at all
+ /* istanbul ignore if */
+ if (!this[_handle].params)
+ throw new Error('not supported in this implementation')
-module.exports = function (t) {
- if (typeof t === 'number') return t;
- var r = ms(t);
- if (r === undefined) {
- var err = new Error(util.format('humanize-ms(%j) result undefined', t));
- console.warn(err.stack);
+ if (this[_level] !== level || this[_strategy] !== strategy) {
+ this.flush(constants.Z_SYNC_FLUSH)
+ assert(this[_handle], 'zlib binding closed')
+ // .params() calls .flush(), but the latter is always async in the
+ // core zlib. We override .flush() temporarily to intercept that and
+ // flush synchronously.
+ const origFlush = this[_handle].flush
+ this[_handle].flush = (flushFlag, cb) => {
+ this.flush(flushFlag)
+ cb()
+ }
+ try {
+ this[_handle].params(level, strategy)
+ } finally {
+ this[_handle].flush = origFlush
+ }
+ /* istanbul ignore else */
+ if (this[_handle]) {
+ this[_level] = level
+ this[_strategy] = strategy
+ }
+ }
}
- return r;
-};
-
-
-/***/ }),
-/* 338 */
-/***/ (function(__unusedmodule, exports) {
-
-"use strict";
-/*!
- * Copyright (c) 2015, Salesforce.com, Inc.
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- *
- * 3. Neither the name of Salesforce.com nor the names of its contributors may
- * be used to endorse or promote products derived from this software without
- * specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
- * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
- * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
- * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGE.
- */
+}
-/*jshint unused:false */
+// minimal 2-byte header
+class Deflate extends Zlib {
+ constructor (opts) {
+ super(opts, 'Deflate')
+ }
+}
-class Store {
- constructor() {
- this.synchronous = false;
+class Inflate extends Zlib {
+ constructor (opts) {
+ super(opts, 'Inflate')
}
+}
- findCookie(domain, path, key, cb) {
- throw new Error("findCookie is not implemented");
+// gzip - bigger header, same deflate compression
+class Gzip extends Zlib {
+ constructor (opts) {
+ super(opts, 'Gzip')
}
+}
- findCookies(domain, path, allowSpecialUseDomain, cb) {
- throw new Error("findCookies is not implemented");
+class Gunzip extends Zlib {
+ constructor (opts) {
+ super(opts, 'Gunzip')
}
+}
- putCookie(cookie, cb) {
- throw new Error("putCookie is not implemented");
+// raw - no header
+class DeflateRaw extends Zlib {
+ constructor (opts) {
+ super(opts, 'DeflateRaw')
}
+}
- updateCookie(oldCookie, newCookie, cb) {
- // recommended default implementation:
- // return this.putCookie(newCookie, cb);
- throw new Error("updateCookie is not implemented");
+class InflateRaw extends Zlib {
+ constructor (opts) {
+ super(opts, 'InflateRaw')
}
+}
- removeCookie(domain, path, key, cb) {
- throw new Error("removeCookie is not implemented");
+// auto-detect header.
+class Unzip extends Zlib {
+ constructor (opts) {
+ super(opts, 'Unzip')
}
+}
- removeCookies(domain, path, cb) {
- throw new Error("removeCookies is not implemented");
+class Brotli extends ZlibBase {
+ constructor (opts, mode) {
+ opts = opts || {}
+
+ opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS
+ opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH
+
+ super(opts, mode)
+
+ this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH
}
+}
- removeAllCookies(cb) {
- throw new Error("removeAllCookies is not implemented");
+class BrotliCompress extends Brotli {
+ constructor (opts) {
+ super(opts, 'BrotliCompress')
}
+}
- getAllCookies(cb) {
- throw new Error(
- "getAllCookies is not implemented (therefore jar cannot be serialized)"
- );
+class BrotliDecompress extends Brotli {
+ constructor (opts) {
+ super(opts, 'BrotliDecompress')
}
}
-exports.Store = Store;
+exports.Deflate = Deflate
+exports.Inflate = Inflate
+exports.Gzip = Gzip
+exports.Gunzip = Gunzip
+exports.DeflateRaw = DeflateRaw
+exports.InflateRaw = InflateRaw
+exports.Unzip = Unzip
+/* istanbul ignore else */
+if (typeof realZlib.BrotliCompress === 'function') {
+ exports.BrotliCompress = BrotliCompress
+ exports.BrotliDecompress = BrotliDecompress
+} else {
+ exports.BrotliCompress = exports.BrotliDecompress = class {
+ constructor () {
+ throw new Error('Brotli is not supported in this version of Node.js')
+ }
+ }
+}
/***/ }),
-/* 339 */,
-/* 340 */
-/***/ (function(__unusedmodule, exports) {
+/* 269 */
+/***/ (function(module, exports, __webpack_require__) {
"use strict";
-/*
- * Copyright The OpenTelemetry Authors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
+
+/**
+ * index.js
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * a request API compatible with window.fetch
*/
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.SamplingDecision = void 0;
+
+const url = __webpack_require__(835)
+const http = __webpack_require__(605)
+const https = __webpack_require__(211)
+const zlib = __webpack_require__(761)
+const PassThrough = __webpack_require__(794).PassThrough
+
+const Body = __webpack_require__(542)
+const writeToStream = Body.writeToStream
+const Response = __webpack_require__(901)
+const Headers = __webpack_require__(68)
+const Request = __webpack_require__(988)
+const getNodeRequestOptions = Request.getNodeRequestOptions
+const FetchError = __webpack_require__(888)
+const isURL = /^https?:/
+
/**
- * A sampling decision that determines how a {@link Span} will be recorded
- * and collected.
+ * Fetch function
+ *
+ * @param Mixed url Absolute url or Request instance
+ * @param Object opts Fetch options
+ * @return Promise
*/
-var SamplingDecision;
-(function (SamplingDecision) {
- /**
- * `Span.isRecording() === false`, span will not be recorded and all events
- * and attributes will be dropped.
- */
- SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD";
- /**
- * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags}
- * MUST NOT be set.
- */
- SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD";
- /**
- * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags}
- * MUST be set.
- */
- SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED";
-})(SamplingDecision = exports.SamplingDecision || (exports.SamplingDecision = {}));
-//# sourceMappingURL=SamplingResult.js.map
+exports = module.exports = fetch
+function fetch (uri, opts) {
+ // allow custom promise
+ if (!fetch.Promise) {
+ throw new Error('native promise missing, set fetch.Promise to your favorite alternative')
+ }
-/***/ }),
-/* 341 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ Body.Promise = fetch.Promise
-"use strict";
+ // wrap http.request into fetch
+ return new fetch.Promise((resolve, reject) => {
+ // build request object
+ const request = new Request(uri, opts)
+ const options = getNodeRequestOptions(request)
-const stripAnsi = __webpack_require__(569);
-const isFullwidthCodePoint = __webpack_require__(97);
+ const send = (options.protocol === 'https:' ? https : http).request
-module.exports = str => {
- if (typeof str !== 'string' || str.length === 0) {
- return 0;
- }
+ // http.request only support string as host header, this hack make custom host header possible
+ if (options.headers.host) {
+ options.headers.host = options.headers.host[0]
+ }
- str = stripAnsi(str);
+ // send request
+ const req = send(options)
+ let reqTimeout
- let width = 0;
+ if (request.timeout) {
+ req.once('socket', socket => {
+ reqTimeout = setTimeout(() => {
+ req.abort()
+ reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'))
+ }, request.timeout)
+ })
+ }
- for (let i = 0; i < str.length; i++) {
- const code = str.codePointAt(i);
+ req.on('error', err => {
+ clearTimeout(reqTimeout)
+ reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err))
+ })
- // Ignore control characters
- if (code <= 0x1F || (code >= 0x7F && code <= 0x9F)) {
- continue;
- }
+ req.on('response', res => {
+ clearTimeout(reqTimeout)
- // Ignore combining characters
- if (code >= 0x300 && code <= 0x36F) {
- continue;
- }
+ // handle redirect
+ if (fetch.isRedirect(res.statusCode) && request.redirect !== 'manual') {
+ if (request.redirect === 'error') {
+ reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect'))
+ return
+ }
- // Surrogates
- if (code > 0xFFFF) {
- i++;
- }
+ if (request.counter >= request.follow) {
+ reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'))
+ return
+ }
- width += isFullwidthCodePoint(code) ? 2 : 1;
- }
+ if (!res.headers.location) {
+ reject(new FetchError(`redirect location header missing at: ${request.url}`, 'invalid-redirect'))
+ return
+ }
- return width;
-};
+ // Comment and logic below is used under the following license:
+ // Copyright (c) 2010-2012 Mikeal Rogers
+ // Licensed under the Apache License, Version 2.0 (the "License");
+ // you may not use this file except in compliance with the License.
+ // You may obtain a copy of the License at
+ // http://www.apache.org/licenses/LICENSE-2.0
+ // Unless required by applicable law or agreed to in writing,
+ // software distributed under the License is distributed on an "AS
+ // IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ // express or implied. See the License for the specific language
+ // governing permissions and limitations under the License.
+ // Remove authorization if changing hostnames (but not if just
+ // changing ports or protocols). This matches the behavior of request:
+ // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
+ const resolvedUrl = url.resolve(request.url, res.headers.location)
+ let redirectURL = ''
+ if (!isURL.test(res.headers.location)) {
+ redirectURL = url.parse(resolvedUrl)
+ } else {
+ redirectURL = url.parse(res.headers.location)
+ }
+ if (url.parse(request.url).hostname !== redirectURL.hostname) {
+ request.headers.delete('authorization')
+ }
-/***/ }),
-/* 342 */,
-/* 343 */
-/***/ (function(module) {
+ // per fetch spec, for POST request with 301/302 response, or any request with 303 response, use GET when following redirect
+ if (res.statusCode === 303 ||
+ ((res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST')) {
+ request.method = 'GET'
+ request.body = null
+ request.headers.delete('content-length')
+ }
-module.exports = require("timers");
+ request.counter++
-/***/ }),
-/* 344 */,
-/* 345 */,
-/* 346 */,
-/* 347 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ resolve(fetch(resolvedUrl, request))
+ return
+ }
-// Generated by CoffeeScript 1.12.7
-(function() {
- var XMLStringWriter, XMLWriterBase,
- extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
- hasProp = {}.hasOwnProperty;
+ // normalize location header for manual redirect mode
+ const headers = new Headers()
+ for (const name of Object.keys(res.headers)) {
+ if (Array.isArray(res.headers[name])) {
+ for (const val of res.headers[name]) {
+ headers.append(name, val)
+ }
+ } else {
+ headers.append(name, res.headers[name])
+ }
+ }
+ if (request.redirect === 'manual' && headers.has('location')) {
+ headers.set('location', url.resolve(request.url, headers.get('location')))
+ }
- XMLWriterBase = __webpack_require__(423);
+ // prepare response
+ let body = res.pipe(new PassThrough())
+ const responseOptions = {
+ url: request.url,
+ status: res.statusCode,
+ statusText: res.statusMessage,
+ headers: headers,
+ size: request.size,
+ timeout: request.timeout
+ }
- module.exports = XMLStringWriter = (function(superClass) {
- extend(XMLStringWriter, superClass);
+ // HTTP-network fetch step 16.1.2
+ const codings = headers.get('Content-Encoding')
- function XMLStringWriter(options) {
- XMLStringWriter.__super__.constructor.call(this, options);
- }
+ // HTTP-network fetch step 16.1.3: handle content codings
- XMLStringWriter.prototype.document = function(doc, options) {
- var child, i, len, r, ref;
- options = this.filterOptions(options);
- r = '';
- ref = doc.children;
- for (i = 0, len = ref.length; i < len; i++) {
- child = ref[i];
- r += this.writeChildNode(child, options, 0);
- }
- if (options.pretty && r.slice(-options.newline.length) === options.newline) {
- r = r.slice(0, -options.newline.length);
+ // in following scenarios we ignore compression support
+ // 1. compression support is disabled
+ // 2. HEAD request
+ // 3. no Content-Encoding header
+ // 4. no content response (204)
+ // 5. content not modified response (304)
+ if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
+ resolve(new Response(body, responseOptions))
+ return
}
- return r;
- };
- return XMLStringWriter;
+ // Be less strict when decoding compressed responses, since sometimes
+ // servers send slightly invalid responses that are still accepted
+ // by common browsers.
+ // Always using Z_SYNC_FLUSH is what cURL does.
+ const zlibOptions = {
+ flush: zlib.Z_SYNC_FLUSH,
+ finishFlush: zlib.Z_SYNC_FLUSH
+ }
- })(XMLWriterBase);
+ // for gzip
+ if (codings === 'gzip' || codings === 'x-gzip') {
+ body = body.pipe(zlib.createGunzip(zlibOptions))
+ resolve(new Response(body, responseOptions))
+ return
+ }
-}).call(this);
+ // for deflate
+ if (codings === 'deflate' || codings === 'x-deflate') {
+ // handle the infamous raw deflate response from old servers
+ // a hack for old IIS and Apache servers
+ const raw = res.pipe(new PassThrough())
+ raw.once('data', chunk => {
+ // see http://stackoverflow.com/questions/37519828
+ if ((chunk[0] & 0x0F) === 0x08) {
+ body = body.pipe(zlib.createInflate(zlibOptions))
+ } else {
+ body = body.pipe(zlib.createInflateRaw(zlibOptions))
+ }
+ resolve(new Response(body, responseOptions))
+ })
+ return
+ }
+ // otherwise, use response as-is
+ resolve(new Response(body, responseOptions))
+ })
-/***/ }),
-/* 348 */
-/***/ (function(__unusedmodule, exports) {
+ writeToStream(req, request)
+ })
+};
-"use strict";
-/*!
- * Copyright (c) 2015, Salesforce.com, Inc.
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- *
- * 3. Neither the name of Salesforce.com nor the names of its contributors may
- * be used to endorse or promote products derived from this software without
- * specific prior written permission.
+/**
+ * Redirect code matching
*
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
- * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
- * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
- * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGE.
- */
-
-/*
- * "A request-path path-matches a given cookie-path if at least one of the
- * following conditions holds:"
+ * @param Number code Status code
+ * @return Boolean
*/
-function pathMatch(reqPath, cookiePath) {
- // "o The cookie-path and the request-path are identical."
- if (cookiePath === reqPath) {
- return true;
- }
-
- const idx = reqPath.indexOf(cookiePath);
- if (idx === 0) {
- // "o The cookie-path is a prefix of the request-path, and the last
- // character of the cookie-path is %x2F ("/")."
- if (cookiePath.substr(-1) === "/") {
- return true;
- }
-
- // " o The cookie-path is a prefix of the request-path, and the first
- // character of the request-path that is not included in the cookie- path
- // is a %x2F ("/") character."
- if (reqPath.substr(cookiePath.length, 1) === "/") {
- return true;
- }
- }
-
- return false;
-}
+fetch.isRedirect = code => code === 301 || code === 302 || code === 303 || code === 307 || code === 308
-exports.pathMatch = pathMatch;
+// expose Promise
+fetch.Promise = global.Promise
+exports.Headers = Headers
+exports.Request = Request
+exports.Response = Response
+exports.FetchError = FetchError
/***/ }),
-/* 349 */,
-/* 350 */
-/***/ (function(__unusedmodule, exports) {
+/* 270 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-// Generated by CoffeeScript 1.12.7
-(function() {
- "use strict";
- var prefixMatch;
+"use strict";
- prefixMatch = new RegExp(/(?!xmlns)^.*:/);
- exports.normalize = function(str) {
- return str.toLowerCase();
- };
+module.exports = __webpack_require__(789)
- exports.firstCharLowerCase = function(str) {
- return str.charAt(0).toLowerCase() + str.slice(1);
- };
- exports.stripPrefix = function(str) {
- return str.replace(prefixMatch, '');
- };
+/***/ }),
+/* 271 */
+/***/ (function(module) {
- exports.parseNumbers = function(str) {
- if (!isNaN(str)) {
- str = str % 1 === 0 ? parseInt(str, 10) : parseFloat(str);
- }
- return str;
- };
+"use strict";
- exports.parseBooleans = function(str) {
- if (/^(?:true|false)$/i.test(str)) {
- str = str.toLowerCase() === 'true';
- }
- return str;
- };
-}).call(this);
+module.exports = hashToSegments
+
+function hashToSegments (hash) {
+ return [
+ hash.slice(0, 2),
+ hash.slice(2, 4),
+ hash.slice(4)
+ ]
+}
/***/ }),
-/* 351 */
+/* 272 */
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-var es5 = __webpack_require__(883);
-var Objectfreeze = es5.freeze;
+module.exports = function(Promise, Context,
+ enableAsyncHooks, disableAsyncHooks) {
+var async = Promise._async;
+var Warning = __webpack_require__(351).Warning;
var util = __webpack_require__(248);
-var inherits = util.inherits;
-var notEnumerableProp = util.notEnumerableProp;
+var es5 = __webpack_require__(883);
+var canAttachTrace = util.canAttachTrace;
+var unhandledRejectionHandled;
+var possiblyUnhandledRejection;
+var bluebirdFramePattern =
+ /[\\\/]bluebird[\\\/]js[\\\/](release|debug|instrumented)/;
+var nodeFramePattern = /\((?:timers\.js):\d+:\d+\)/;
+var parseLinePattern = /[\/<\(](.+?):(\d+):(\d+)\)?\s*$/;
+var stackFramePattern = null;
+var formatStack = null;
+var indentStackFrames = false;
+var printWarning;
+var debugging = !!(util.env("BLUEBIRD_DEBUG") != 0 &&
+ ( false ||
+ util.env("BLUEBIRD_DEBUG") ||
+ util.env("NODE_ENV") === "development"));
-function subError(nameProperty, defaultMessage) {
- function SubError(message) {
- if (!(this instanceof SubError)) return new SubError(message);
- notEnumerableProp(this, "message",
- typeof message === "string" ? message : defaultMessage);
- notEnumerableProp(this, "name", nameProperty);
- if (Error.captureStackTrace) {
- Error.captureStackTrace(this, this.constructor);
- } else {
- Error.call(this);
- }
- }
- inherits(SubError, Error);
- return SubError;
-}
+var warnings = !!(util.env("BLUEBIRD_WARNINGS") != 0 &&
+ (debugging || util.env("BLUEBIRD_WARNINGS")));
-var _TypeError, _RangeError;
-var Warning = subError("Warning", "warning");
-var CancellationError = subError("CancellationError", "cancellation error");
-var TimeoutError = subError("TimeoutError", "timeout error");
-var AggregateError = subError("AggregateError", "aggregate error");
-try {
- _TypeError = TypeError;
- _RangeError = RangeError;
-} catch(e) {
- _TypeError = subError("TypeError", "type error");
- _RangeError = subError("RangeError", "range error");
-}
+var longStackTraces = !!(util.env("BLUEBIRD_LONG_STACK_TRACES") != 0 &&
+ (debugging || util.env("BLUEBIRD_LONG_STACK_TRACES")));
-var methods = ("join pop push shift unshift slice filter forEach some " +
- "every map indexOf lastIndexOf reduce reduceRight sort reverse").split(" ");
+var wForgottenReturn = util.env("BLUEBIRD_W_FORGOTTEN_RETURN") != 0 &&
+ (warnings || !!util.env("BLUEBIRD_W_FORGOTTEN_RETURN"));
-for (var i = 0; i < methods.length; ++i) {
- if (typeof Array.prototype[methods[i]] === "function") {
- AggregateError.prototype[methods[i]] = Array.prototype[methods[i]];
- }
-}
+var deferUnhandledRejectionCheck;
+(function() {
+ var promises = [];
-es5.defineProperty(AggregateError.prototype, "length", {
- value: 0,
- configurable: false,
- writable: true,
- enumerable: true
-});
-AggregateError.prototype["isOperational"] = true;
-var level = 0;
-AggregateError.prototype.toString = function() {
- var indent = Array(level * 4 + 1).join(" ");
- var ret = "\n" + indent + "AggregateError of:" + "\n";
- level++;
- indent = Array(level * 4 + 1).join(" ");
- for (var i = 0; i < this.length; ++i) {
- var str = this[i] === this ? "[Circular AggregateError]" : this[i] + "";
- var lines = str.split("\n");
- for (var j = 0; j < lines.length; ++j) {
- lines[j] = indent + lines[j];
+ function unhandledRejectionCheck() {
+ for (var i = 0; i < promises.length; ++i) {
+ promises[i]._notifyUnhandledRejection();
}
- str = lines.join("\n");
- ret += str + "\n";
+ unhandledRejectionClear();
}
- level--;
- return ret;
-};
-
-function OperationalError(message) {
- if (!(this instanceof OperationalError))
- return new OperationalError(message);
- notEnumerableProp(this, "name", "OperationalError");
- notEnumerableProp(this, "message", message);
- this.cause = message;
- this["isOperational"] = true;
- if (message instanceof Error) {
- notEnumerableProp(this, "message", message.message);
- notEnumerableProp(this, "stack", message.stack);
- } else if (Error.captureStackTrace) {
- Error.captureStackTrace(this, this.constructor);
+ function unhandledRejectionClear() {
+ promises.length = 0;
}
-}
-inherits(OperationalError, Error);
+ deferUnhandledRejectionCheck = function(promise) {
+ promises.push(promise);
+ setTimeout(unhandledRejectionCheck, 1);
+ };
-var errorTypes = Error["__BluebirdErrorTypes__"];
-if (!errorTypes) {
- errorTypes = Objectfreeze({
- CancellationError: CancellationError,
- TimeoutError: TimeoutError,
- OperationalError: OperationalError,
- RejectionError: OperationalError,
- AggregateError: AggregateError
+ es5.defineProperty(Promise, "_unhandledRejectionCheck", {
+ value: unhandledRejectionCheck
});
- es5.defineProperty(Error, "__BluebirdErrorTypes__", {
- value: errorTypes,
- writable: false,
- enumerable: false,
- configurable: false
+ es5.defineProperty(Promise, "_unhandledRejectionClear", {
+ value: unhandledRejectionClear
});
-}
+})();
-module.exports = {
- Error: Error,
- TypeError: _TypeError,
- RangeError: _RangeError,
- CancellationError: errorTypes.CancellationError,
- OperationalError: errorTypes.OperationalError,
- TimeoutError: errorTypes.TimeoutError,
- AggregateError: errorTypes.AggregateError,
- Warning: Warning
+Promise.prototype.suppressUnhandledRejections = function() {
+ var target = this._target();
+ target._bitField = ((target._bitField & (~1048576)) |
+ 524288);
};
+Promise.prototype._ensurePossibleRejectionHandled = function () {
+ if ((this._bitField & 524288) !== 0) return;
+ this._setRejectionIsUnhandled();
+ deferUnhandledRejectionCheck(this);
+};
-/***/ }),
-/* 352 */,
-/* 353 */,
-/* 354 */
-/***/ (function(module) {
-
-"use strict";
-
+Promise.prototype._notifyUnhandledRejectionIsHandled = function () {
+ fireRejectionEvent("rejectionHandled",
+ unhandledRejectionHandled, undefined, this);
+};
-const LEVELS = [
- 'notice',
- 'error',
- 'warn',
- 'info',
- 'verbose',
- 'http',
- 'silly',
- 'pause',
- 'resume'
-]
+Promise.prototype._setReturnedNonUndefined = function() {
+ this._bitField = this._bitField | 268435456;
+};
-const logger = {}
-for (const level of LEVELS) {
- logger[level] = log(level)
-}
-module.exports = logger
+Promise.prototype._returnedNonUndefined = function() {
+ return (this._bitField & 268435456) !== 0;
+};
-function log (level) {
- return (category, ...args) => process.emit('log', level, category, ...args)
-}
+Promise.prototype._notifyUnhandledRejection = function () {
+ if (this._isRejectionUnhandled()) {
+ var reason = this._settledValue();
+ this._setUnhandledRejectionIsNotified();
+ fireRejectionEvent("unhandledRejection",
+ possiblyUnhandledRejection, reason, this);
+ }
+};
+Promise.prototype._setUnhandledRejectionIsNotified = function () {
+ this._bitField = this._bitField | 262144;
+};
-/***/ }),
-/* 355 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+Promise.prototype._unsetUnhandledRejectionIsNotified = function () {
+ this._bitField = this._bitField & (~262144);
+};
-module.exports = {
- publish: __webpack_require__(395),
- unpublish: __webpack_require__(368)
-}
+Promise.prototype._isUnhandledRejectionNotified = function () {
+ return (this._bitField & 262144) > 0;
+};
+Promise.prototype._setRejectionIsUnhandled = function () {
+ this._bitField = this._bitField | 1048576;
+};
-/***/ }),
-/* 356 */
-/***/ (function(module) {
+Promise.prototype._unsetRejectionIsUnhandled = function () {
+ this._bitField = this._bitField & (~1048576);
+ if (this._isUnhandledRejectionNotified()) {
+ this._unsetUnhandledRejectionIsNotified();
+ this._notifyUnhandledRejectionIsHandled();
+ }
+};
-"use strict";
+Promise.prototype._isRejectionUnhandled = function () {
+ return (this._bitField & 1048576) > 0;
+};
-// this exists so we can replace it during testing
-module.exports = process
+Promise.prototype._warn = function(message, shouldUseOwnTrace, promise) {
+ return warn(message, shouldUseOwnTrace, promise || this);
+};
+Promise.onPossiblyUnhandledRejection = function (fn) {
+ var context = Promise._getContext();
+ possiblyUnhandledRejection = util.contextBind(context, fn);
+};
-/***/ }),
-/* 357 */
-/***/ (function(module) {
+Promise.onUnhandledRejectionHandled = function (fn) {
+ var context = Promise._getContext();
+ unhandledRejectionHandled = util.contextBind(context, fn);
+};
-module.exports = require("assert");
+var disableLongStackTraces = function() {};
+Promise.longStackTraces = function () {
+ if (async.haveItemsQueued() && !config.longStackTraces) {
+ throw new Error("cannot enable long stack traces after promises have been created\u000a\u000a See http://goo.gl/MqrFmX\u000a");
+ }
+ if (!config.longStackTraces && longStackTracesIsSupported()) {
+ var Promise_captureStackTrace = Promise.prototype._captureStackTrace;
+ var Promise_attachExtraTrace = Promise.prototype._attachExtraTrace;
+ var Promise_dereferenceTrace = Promise.prototype._dereferenceTrace;
+ config.longStackTraces = true;
+ disableLongStackTraces = function() {
+ if (async.haveItemsQueued() && !config.longStackTraces) {
+ throw new Error("cannot enable long stack traces after promises have been created\u000a\u000a See http://goo.gl/MqrFmX\u000a");
+ }
+ Promise.prototype._captureStackTrace = Promise_captureStackTrace;
+ Promise.prototype._attachExtraTrace = Promise_attachExtraTrace;
+ Promise.prototype._dereferenceTrace = Promise_dereferenceTrace;
+ Context.deactivateLongStackTraces();
+ config.longStackTraces = false;
+ };
+ Promise.prototype._captureStackTrace = longStackTracesCaptureStackTrace;
+ Promise.prototype._attachExtraTrace = longStackTracesAttachExtraTrace;
+ Promise.prototype._dereferenceTrace = longStackTracesDereferenceTrace;
+ Context.activateLongStackTraces();
+ }
+};
-/***/ }),
-/* 358 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+Promise.hasLongStackTraces = function () {
+ return config.longStackTraces && longStackTracesIsSupported();
+};
-"use strict";
+var legacyHandlers = {
+ unhandledrejection: {
+ before: function() {
+ var ret = util.global.onunhandledrejection;
+ util.global.onunhandledrejection = null;
+ return ret;
+ },
+ after: function(fn) {
+ util.global.onunhandledrejection = fn;
+ }
+ },
+ rejectionhandled: {
+ before: function() {
+ var ret = util.global.onrejectionhandled;
+ util.global.onrejectionhandled = null;
+ return ret;
+ },
+ after: function(fn) {
+ util.global.onrejectionhandled = fn;
+ }
+ }
+};
-// A module for chowning things we just created, to preserve
-// ownership of new links and directories.
+var fireDomEvent = (function() {
+ var dispatch = function(legacy, e) {
+ if (legacy) {
+ var fn;
+ try {
+ fn = legacy.before();
+ return !util.global.dispatchEvent(e);
+ } finally {
+ legacy.after(fn);
+ }
+ } else {
+ return !util.global.dispatchEvent(e);
+ }
+ };
+ try {
+ if (typeof CustomEvent === "function") {
+ var event = new CustomEvent("CustomEvent");
+ util.global.dispatchEvent(event);
+ return function(name, event) {
+ name = name.toLowerCase();
+ var eventData = {
+ detail: event,
+ cancelable: true
+ };
+ var domEvent = new CustomEvent(name, eventData);
+ es5.defineProperty(
+ domEvent, "promise", {value: event.promise});
+ es5.defineProperty(
+ domEvent, "reason", {value: event.reason});
-const chownr = __webpack_require__(941)
+ return dispatch(legacyHandlers[name], domEvent);
+ };
+ } else if (typeof Event === "function") {
+ var event = new Event("CustomEvent");
+ util.global.dispatchEvent(event);
+ return function(name, event) {
+ name = name.toLowerCase();
+ var domEvent = new Event(name, {
+ cancelable: true
+ });
+ domEvent.detail = event;
+ es5.defineProperty(domEvent, "promise", {value: event.promise});
+ es5.defineProperty(domEvent, "reason", {value: event.reason});
+ return dispatch(legacyHandlers[name], domEvent);
+ };
+ } else {
+ var event = document.createEvent("CustomEvent");
+ event.initCustomEvent("testingtheevent", false, true, {});
+ util.global.dispatchEvent(event);
+ return function(name, event) {
+ name = name.toLowerCase();
+ var domEvent = document.createEvent("CustomEvent");
+ domEvent.initCustomEvent(name, false, true,
+ event);
+ return dispatch(legacyHandlers[name], domEvent);
+ };
+ }
+ } catch (e) {}
+ return function() {
+ return false;
+ };
+})();
-const selfOwner = {
- uid: process.getuid && process.getuid(),
- gid: process.getgid && process.getgid()
-}
+var fireGlobalEvent = (function() {
+ if (util.isNode) {
+ return function() {
+ return process.emit.apply(process, arguments);
+ };
+ } else {
+ if (!util.global) {
+ return function() {
+ return false;
+ };
+ }
+ return function(name) {
+ var methodName = "on" + name.toLowerCase();
+ var method = util.global[methodName];
+ if (!method) return false;
+ method.apply(util.global, [].slice.call(arguments, 1));
+ return true;
+ };
+ }
+})();
-module.exports = (path, uid, gid, cb) => {
- if (selfOwner.uid !== 0 ||
- uid === undefined || gid === undefined ||
- (selfOwner.uid === uid && selfOwner.gid === gid)) {
- // don't need to, or can't chown anyway, so just leave it.
- // this also handles platforms where process.getuid is undefined
- return cb()
- }
- chownr(path, uid, gid, cb)
+function generatePromiseLifecycleEventObject(name, promise) {
+ return {promise: promise};
}
-module.exports.selfOwner = selfOwner
-
+var eventToObjectGenerator = {
+ promiseCreated: generatePromiseLifecycleEventObject,
+ promiseFulfilled: generatePromiseLifecycleEventObject,
+ promiseRejected: generatePromiseLifecycleEventObject,
+ promiseResolved: generatePromiseLifecycleEventObject,
+ promiseCancelled: generatePromiseLifecycleEventObject,
+ promiseChained: function(name, promise, child) {
+ return {promise: promise, child: child};
+ },
+ warning: function(name, warning) {
+ return {warning: warning};
+ },
+ unhandledRejection: function (name, reason, promise) {
+ return {reason: reason, promise: promise};
+ },
+ rejectionHandled: generatePromiseLifecycleEventObject
+};
-/***/ }),
-/* 359 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+var activeFireEvent = function (name) {
+ var globalEventFired = false;
+ try {
+ globalEventFired = fireGlobalEvent.apply(null, arguments);
+ } catch (e) {
+ async.throwLater(e);
+ globalEventFired = true;
+ }
-"use strict";
+ var domEventFired = false;
+ try {
+ domEventFired = fireDomEvent(name,
+ eventToObjectGenerator[name].apply(null, arguments));
+ } catch (e) {
+ async.throwLater(e);
+ domEventFired = true;
+ }
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
+ return domEventFired || globalEventFired;
};
-Object.defineProperty(exports, "__esModule", { value: true });
-const events_1 = __webpack_require__(614);
-const net = __webpack_require__(631);
-const ip = __webpack_require__(769);
-const smart_buffer_1 = __webpack_require__(118);
-const constants_1 = __webpack_require__(206);
-const helpers_1 = __webpack_require__(372);
-const receivebuffer_1 = __webpack_require__(806);
-const util_1 = __webpack_require__(526);
-class SocksClient extends events_1.EventEmitter {
- constructor(options) {
- super();
- this._options = Object.assign({}, options);
- // Validate SocksClientOptions
- helpers_1.validateSocksClientOptions(options);
- // Default state
- this.state = constants_1.SocksClientState.Created;
- }
- /**
- * Creates a new SOCKS connection.
- *
- * Note: Supports callbacks and promises. Only supports the connect command.
- * @param options { SocksClientOptions } Options.
- * @param callback { Function } An optional callback function.
- * @returns { Promise }
- */
- static createConnection(options, callback) {
- // Validate SocksClientOptions
- helpers_1.validateSocksClientOptions(options, ['connect']);
- return new Promise((resolve, reject) => {
- const client = new SocksClient(options);
- client.connect(options.existing_socket);
- client.once('established', (info) => {
- client.removeAllListeners();
- if (typeof callback === 'function') {
- callback(null, info);
- resolve(); // Resolves pending promise (prevents memory leaks).
- }
- else {
- resolve(info);
- }
- });
- // Error occurred, failed to establish connection.
- client.once('error', (err) => {
- client.removeAllListeners();
- if (typeof callback === 'function') {
- callback(err);
- resolve(); // Resolves pending promise (prevents memory leaks).
- }
- else {
- reject(err);
- }
- });
- });
- }
- /**
- * Creates a new SOCKS connection chain to a destination host through 2 or more SOCKS proxies.
- *
- * Note: Supports callbacks and promises. Only supports the connect method.
- * Note: Implemented via createConnection() factory function.
- * @param options { SocksClientChainOptions } Options
- * @param callback { Function } An optional callback function.
- * @returns { Promise }
- */
- static createConnectionChain(options, callback) {
- // Validate SocksClientChainOptions
- helpers_1.validateSocksClientChainOptions(options);
- // Shuffle proxies
- if (options.randomizeChain) {
- util_1.shuffleArray(options.proxies);
+
+Promise.config = function(opts) {
+ opts = Object(opts);
+ if ("longStackTraces" in opts) {
+ if (opts.longStackTraces) {
+ Promise.longStackTraces();
+ } else if (!opts.longStackTraces && Promise.hasLongStackTraces()) {
+ disableLongStackTraces();
}
- return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
- let sock;
- try {
- for (let i = 0; i < options.proxies.length; i++) {
- const nextProxy = options.proxies[i];
- // If we've reached the last proxy in the chain, the destination is the actual destination, otherwise it's the next proxy.
- const nextDestination = i === options.proxies.length - 1
- ? options.destination
- : {
- host: options.proxies[i + 1].ipaddress,
- port: options.proxies[i + 1].port
- };
- // Creates the next connection in the chain.
- const result = yield SocksClient.createConnection({
- command: 'connect',
- proxy: nextProxy,
- destination: nextDestination
- // Initial connection ignores this as sock is undefined. Subsequent connections re-use the first proxy socket to form a chain.
- });
- // If sock is undefined, assign it here.
- if (!sock) {
- sock = result.socket;
- }
- }
- if (typeof callback === 'function') {
- callback(null, { socket: sock });
- resolve(); // Resolves pending promise (prevents memory leaks).
- }
- else {
- resolve({ socket: sock });
- }
- }
- catch (err) {
- if (typeof callback === 'function') {
- callback(err);
- resolve(); // Resolves pending promise (prevents memory leaks).
- }
- else {
- reject(err);
- }
- }
- }));
}
- /**
- * Creates a SOCKS UDP Frame.
- * @param options
- */
- static createUDPFrame(options) {
- const buff = new smart_buffer_1.SmartBuffer();
- buff.writeUInt16BE(0);
- buff.writeUInt8(options.frameNumber || 0);
- // IPv4/IPv6/Hostname
- if (net.isIPv4(options.remoteHost.host)) {
- buff.writeUInt8(constants_1.Socks5HostType.IPv4);
- buff.writeUInt32BE(ip.toLong(options.remoteHost.host));
- }
- else if (net.isIPv6(options.remoteHost.host)) {
- buff.writeUInt8(constants_1.Socks5HostType.IPv6);
- buff.writeBuffer(ip.toBuffer(options.remoteHost.host));
- }
- else {
- buff.writeUInt8(constants_1.Socks5HostType.Hostname);
- buff.writeUInt8(Buffer.byteLength(options.remoteHost.host));
- buff.writeString(options.remoteHost.host);
+ if ("warnings" in opts) {
+ var warningsOption = opts.warnings;
+ config.warnings = !!warningsOption;
+ wForgottenReturn = config.warnings;
+
+ if (util.isObject(warningsOption)) {
+ if ("wForgottenReturn" in warningsOption) {
+ wForgottenReturn = !!warningsOption.wForgottenReturn;
+ }
}
- // Port
- buff.writeUInt16BE(options.remoteHost.port);
- // Data
- buff.writeBuffer(options.data);
- return buff.toBuffer();
}
- /**
- * Parses a SOCKS UDP frame.
- * @param data
- */
- static parseUDPFrame(data) {
- const buff = smart_buffer_1.SmartBuffer.fromBuffer(data);
- buff.readOffset = 2;
- const frameNumber = buff.readUInt8();
- const hostType = buff.readUInt8();
- let remoteHost;
- if (hostType === constants_1.Socks5HostType.IPv4) {
- remoteHost = ip.fromLong(buff.readUInt32BE());
- }
- else if (hostType === constants_1.Socks5HostType.IPv6) {
- remoteHost = ip.toString(buff.readBuffer(16));
- }
- else {
- remoteHost = buff.readString(buff.readUInt8());
+ if ("cancellation" in opts && opts.cancellation && !config.cancellation) {
+ if (async.haveItemsQueued()) {
+ throw new Error(
+ "cannot enable cancellation after promises are in use");
}
- const remotePort = buff.readUInt16BE();
- return {
- frameNumber,
- remoteHost: {
- host: remoteHost,
- port: remotePort
- },
- data: buff.readBuffer()
- };
- }
- /**
- * Gets the SocksClient internal state.
- */
- get state() {
- return this._state;
+ Promise.prototype._clearCancellationData =
+ cancellationClearCancellationData;
+ Promise.prototype._propagateFrom = cancellationPropagateFrom;
+ Promise.prototype._onCancel = cancellationOnCancel;
+ Promise.prototype._setOnCancel = cancellationSetOnCancel;
+ Promise.prototype._attachCancellationCallback =
+ cancellationAttachCancellationCallback;
+ Promise.prototype._execute = cancellationExecute;
+ propagateFromFunction = cancellationPropagateFrom;
+ config.cancellation = true;
}
- /**
- * Internal state setter. If the SocksClient is in an error state, it cannot be changed to a non error state.
- */
- set state(newState) {
- if (this._state !== constants_1.SocksClientState.Error) {
- this._state = newState;
+ if ("monitoring" in opts) {
+ if (opts.monitoring && !config.monitoring) {
+ config.monitoring = true;
+ Promise.prototype._fireEvent = activeFireEvent;
+ } else if (!opts.monitoring && config.monitoring) {
+ config.monitoring = false;
+ Promise.prototype._fireEvent = defaultFireEvent;
}
}
- /**
- * Starts the connection establishment to the proxy and destination.
- * @param existing_socket Connected socket to use instead of creating a new one (internal use).
- */
- connect(existing_socket) {
- this._onDataReceived = (data) => this.onDataReceived(data);
- this._onClose = () => this.onClose();
- this._onError = (err) => this.onError(err);
- this._onConnect = () => this.onConnect();
- // Start timeout timer (defaults to 30 seconds)
- const timer = setTimeout(() => this.onEstablishedTimeout(), this._options.timeout || constants_1.DEFAULT_TIMEOUT);
- // check whether unref is available as it differs from browser to NodeJS (#33)
- if (timer.unref && typeof timer.unref === 'function') {
- timer.unref();
- }
- // If an existing socket is provided, use it to negotiate SOCKS handshake. Otherwise create a new Socket.
- if (existing_socket) {
- this._socket = existing_socket;
- }
- else {
- this._socket = new net.Socket();
- }
- // Attach Socket error handlers.
- this._socket.once('close', this._onClose);
- this._socket.once('error', this._onError);
- this._socket.once('connect', this._onConnect);
- this._socket.on('data', this._onDataReceived);
- this.state = constants_1.SocksClientState.Connecting;
- this._receiveBuffer = new receivebuffer_1.ReceiveBuffer();
- if (existing_socket) {
- this._socket.emit('connect');
- }
- else {
- this._socket.connect(this.getSocketOptions());
- if (this._options.set_tcp_nodelay !== undefined &&
- this._options.set_tcp_nodelay !== null) {
- this._socket.setNoDelay(!!this._options.set_tcp_nodelay);
+ if ("asyncHooks" in opts && util.nodeSupportsAsyncResource) {
+ var prev = config.asyncHooks;
+ var cur = !!opts.asyncHooks;
+ if (prev !== cur) {
+ config.asyncHooks = cur;
+ if (cur) {
+ enableAsyncHooks();
+ } else {
+ disableAsyncHooks();
}
}
- // Listen for established event so we can re-emit any excess data received during handshakes.
- this.prependOnceListener('established', info => {
- setImmediate(() => {
- if (this._receiveBuffer.length > 0) {
- const excessData = this._receiveBuffer.get(this._receiveBuffer.length);
- info.socket.emit('data', excessData);
- }
- info.socket.resume();
- });
- });
}
- // Socket options (defaults host/port to options.proxy.host/options.proxy.port)
- getSocketOptions() {
- return Object.assign(Object.assign({}, this._options.socket_options), { host: this._options.proxy.host || this._options.proxy.ipaddress, port: this._options.proxy.port });
+ return Promise;
+};
+
+function defaultFireEvent() { return false; }
+
+Promise.prototype._fireEvent = defaultFireEvent;
+Promise.prototype._execute = function(executor, resolve, reject) {
+ try {
+ executor(resolve, reject);
+ } catch (e) {
+ return e;
}
- /**
- * Handles internal Socks timeout callback.
- * Note: If the Socks client is not BoundWaitingForConnection or Established, the connection will be closed.
- */
- onEstablishedTimeout() {
- if (this.state !== constants_1.SocksClientState.Established &&
- this.state !== constants_1.SocksClientState.BoundWaitingForConnection) {
- this._closeSocket(constants_1.ERRORS.ProxyConnectionTimedOut);
+};
+Promise.prototype._onCancel = function () {};
+Promise.prototype._setOnCancel = function (handler) { ; };
+Promise.prototype._attachCancellationCallback = function(onCancel) {
+ ;
+};
+Promise.prototype._captureStackTrace = function () {};
+Promise.prototype._attachExtraTrace = function () {};
+Promise.prototype._dereferenceTrace = function () {};
+Promise.prototype._clearCancellationData = function() {};
+Promise.prototype._propagateFrom = function (parent, flags) {
+ ;
+ ;
+};
+
+function cancellationExecute(executor, resolve, reject) {
+ var promise = this;
+ try {
+ executor(resolve, reject, function(onCancel) {
+ if (typeof onCancel !== "function") {
+ throw new TypeError("onCancel must be a function, got: " +
+ util.toString(onCancel));
+ }
+ promise._attachCancellationCallback(onCancel);
+ });
+ } catch (e) {
+ return e;
+ }
+}
+
+function cancellationAttachCancellationCallback(onCancel) {
+ if (!this._isCancellable()) return this;
+
+ var previousOnCancel = this._onCancel();
+ if (previousOnCancel !== undefined) {
+ if (util.isArray(previousOnCancel)) {
+ previousOnCancel.push(onCancel);
+ } else {
+ this._setOnCancel([previousOnCancel, onCancel]);
}
+ } else {
+ this._setOnCancel(onCancel);
}
- /**
- * Handles Socket connect event.
- */
- onConnect() {
- this.state = constants_1.SocksClientState.Connected;
- // Send initial handshake.
- if (this._options.proxy.type === 4) {
- this.sendSocks4InitialHandshake();
+}
+
+function cancellationOnCancel() {
+ return this._onCancelField;
+}
+
+function cancellationSetOnCancel(onCancel) {
+ this._onCancelField = onCancel;
+}
+
+function cancellationClearCancellationData() {
+ this._cancellationParent = undefined;
+ this._onCancelField = undefined;
+}
+
+function cancellationPropagateFrom(parent, flags) {
+ if ((flags & 1) !== 0) {
+ this._cancellationParent = parent;
+ var branchesRemainingToCancel = parent._branchesRemainingToCancel;
+ if (branchesRemainingToCancel === undefined) {
+ branchesRemainingToCancel = 0;
}
- else {
- this.sendSocks5InitialHandshake();
+ parent._branchesRemainingToCancel = branchesRemainingToCancel + 1;
+ }
+ if ((flags & 2) !== 0 && parent._isBound()) {
+ this._setBoundTo(parent._boundTo);
+ }
+}
+
+function bindingPropagateFrom(parent, flags) {
+ if ((flags & 2) !== 0 && parent._isBound()) {
+ this._setBoundTo(parent._boundTo);
+ }
+}
+var propagateFromFunction = bindingPropagateFrom;
+
+function boundValueFunction() {
+ var ret = this._boundTo;
+ if (ret !== undefined) {
+ if (ret instanceof Promise) {
+ if (ret.isFulfilled()) {
+ return ret.value();
+ } else {
+ return undefined;
+ }
}
- this.state = constants_1.SocksClientState.SentInitialHandshake;
}
- /**
- * Handles Socket data event.
- * @param data
- */
- onDataReceived(data) {
- /*
- All received data is appended to a ReceiveBuffer.
- This makes sure that all the data we need is received before we attempt to process it.
- */
- this._receiveBuffer.append(data);
- // Process data that we have.
- this.processData();
+ return ret;
+}
+
+function longStackTracesCaptureStackTrace() {
+ this._trace = new CapturedTrace(this._peekContext());
+}
+
+function longStackTracesAttachExtraTrace(error, ignoreSelf) {
+ if (canAttachTrace(error)) {
+ var trace = this._trace;
+ if (trace !== undefined) {
+ if (ignoreSelf) trace = trace._parent;
+ }
+ if (trace !== undefined) {
+ trace.attachExtraTrace(error);
+ } else if (!error.__stackCleaned__) {
+ var parsed = parseStackAndMessage(error);
+ util.notEnumerableProp(error, "stack",
+ parsed.message + "\n" + parsed.stack.join("\n"));
+ util.notEnumerableProp(error, "__stackCleaned__", true);
+ }
}
- /**
- * Handles processing of the data we have received.
- */
- processData() {
- // If we have enough data to process the next step in the SOCKS handshake, proceed.
- if (this._receiveBuffer.length >= this._nextRequiredPacketBufferSize) {
- // Sent initial handshake, waiting for response.
- if (this.state === constants_1.SocksClientState.SentInitialHandshake) {
- if (this._options.proxy.type === 4) {
- // Socks v4 only has one handshake response.
- this.handleSocks4FinalHandshakeResponse();
- }
- else {
- // Socks v5 has two handshakes, handle initial one here.
- this.handleInitialSocks5HandshakeResponse();
+}
+
+function longStackTracesDereferenceTrace() {
+ this._trace = undefined;
+}
+
+function checkForgottenReturns(returnValue, promiseCreated, name, promise,
+ parent) {
+ if (returnValue === undefined && promiseCreated !== null &&
+ wForgottenReturn) {
+ if (parent !== undefined && parent._returnedNonUndefined()) return;
+ if ((promise._bitField & 65535) === 0) return;
+
+ if (name) name = name + " ";
+ var handlerLine = "";
+ var creatorLine = "";
+ if (promiseCreated._trace) {
+ var traceLines = promiseCreated._trace.stack.split("\n");
+ var stack = cleanStack(traceLines);
+ for (var i = stack.length - 1; i >= 0; --i) {
+ var line = stack[i];
+ if (!nodeFramePattern.test(line)) {
+ var lineMatches = line.match(parseLinePattern);
+ if (lineMatches) {
+ handlerLine = "at " + lineMatches[1] +
+ ":" + lineMatches[2] + ":" + lineMatches[3] + " ";
+ }
+ break;
}
- // Sent auth request for Socks v5, waiting for response.
- }
- else if (this.state === constants_1.SocksClientState.SentAuthentication) {
- this.handleInitialSocks5AuthenticationHandshakeResponse();
- // Sent final Socks v5 handshake, waiting for final response.
- }
- else if (this.state === constants_1.SocksClientState.SentFinalHandshake) {
- this.handleSocks5FinalHandshakeResponse();
- // Socks BIND established. Waiting for remote connection via proxy.
}
- else if (this.state === constants_1.SocksClientState.BoundWaitingForConnection) {
- if (this._options.proxy.type === 4) {
- this.handleSocks4IncomingConnectionResponse();
- }
- else {
- this.handleSocks5IncomingConnectionResponse();
+
+ if (stack.length > 0) {
+ var firstUserLine = stack[0];
+ for (var i = 0; i < traceLines.length; ++i) {
+
+ if (traceLines[i] === firstUserLine) {
+ if (i > 0) {
+ creatorLine = "\n" + traceLines[i - 1];
+ }
+ break;
+ }
}
- }
- else if (this.state === constants_1.SocksClientState.Established) {
- // do nothing (prevents closing of the socket)
- }
- else {
- this._closeSocket(constants_1.ERRORS.InternalError);
+
}
}
+ var msg = "a promise was created in a " + name +
+ "handler " + handlerLine + "but was not returned from it, " +
+ "see http://goo.gl/rRqMUw" +
+ creatorLine;
+ promise._warn(msg, true, promiseCreated);
}
- /**
- * Handles Socket close event.
- * @param had_error
- */
- onClose() {
- this._closeSocket(constants_1.ERRORS.SocketClosed);
+}
+
+function deprecated(name, replacement) {
+ var message = name +
+ " is deprecated and will be removed in a future version.";
+ if (replacement) message += " Use " + replacement + " instead.";
+ return warn(message);
+}
+
+function warn(message, shouldUseOwnTrace, promise) {
+ if (!config.warnings) return;
+ var warning = new Warning(message);
+ var ctx;
+ if (shouldUseOwnTrace) {
+ promise._attachExtraTrace(warning);
+ } else if (config.longStackTraces && (ctx = Promise._peekContext())) {
+ ctx.attachExtraTrace(warning);
+ } else {
+ var parsed = parseStackAndMessage(warning);
+ warning.stack = parsed.message + "\n" + parsed.stack.join("\n");
}
- /**
- * Handles Socket error event.
- * @param err
- */
- onError(err) {
- this._closeSocket(err.message);
+
+ if (!activeFireEvent("warning", warning)) {
+ formatAndLogError(warning, "", true);
}
- /**
- * Removes internal event listeners on the underlying Socket.
- */
- removeInternalSocketHandlers() {
- // Pauses data flow of the socket (this is internally resumed after 'established' is emitted)
- this._socket.pause();
- this._socket.removeListener('data', this._onDataReceived);
- this._socket.removeListener('close', this._onClose);
- this._socket.removeListener('error', this._onError);
- this._socket.removeListener('connect', this.onConnect);
+}
+
+function reconstructStack(message, stacks) {
+ for (var i = 0; i < stacks.length - 1; ++i) {
+ stacks[i].push("From previous event:");
+ stacks[i] = stacks[i].join("\n");
}
- /**
- * Closes and destroys the underlying Socket. Emits an error event.
- * @param err { String } An error string to include in error event.
- */
- _closeSocket(err) {
- // Make sure only one 'error' event is fired for the lifetime of this SocksClient instance.
- if (this.state !== constants_1.SocksClientState.Error) {
- // Set internal state to Error.
- this.state = constants_1.SocksClientState.Error;
- // Destroy Socket
- this._socket.destroy();
- // Remove internal listeners
- this.removeInternalSocketHandlers();
- // Fire 'error' event.
- this.emit('error', new util_1.SocksClientError(err, this._options));
- }
+ if (i < stacks.length) {
+ stacks[i] = stacks[i].join("\n");
}
- /**
- * Sends initial Socks v4 handshake request.
- */
- sendSocks4InitialHandshake() {
- const userId = this._options.proxy.userId || '';
- const buff = new smart_buffer_1.SmartBuffer();
- buff.writeUInt8(0x04);
- buff.writeUInt8(constants_1.SocksCommand[this._options.command]);
- buff.writeUInt16BE(this._options.destination.port);
- // Socks 4 (IPv4)
- if (net.isIPv4(this._options.destination.host)) {
- buff.writeBuffer(ip.toBuffer(this._options.destination.host));
- buff.writeStringNT(userId);
- // Socks 4a (hostname)
- }
- else {
- buff.writeUInt8(0x00);
- buff.writeUInt8(0x00);
- buff.writeUInt8(0x00);
- buff.writeUInt8(0x01);
- buff.writeStringNT(userId);
- buff.writeStringNT(this._options.destination.host);
+ return message + "\n" + stacks.join("\n");
+}
+
+function removeDuplicateOrEmptyJumps(stacks) {
+ for (var i = 0; i < stacks.length; ++i) {
+ if (stacks[i].length === 0 ||
+ ((i + 1 < stacks.length) && stacks[i][0] === stacks[i+1][0])) {
+ stacks.splice(i, 1);
+ i--;
}
- this._nextRequiredPacketBufferSize =
- constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks4Response;
- this._socket.write(buff.toBuffer());
}
- /**
- * Handles Socks v4 handshake response.
- * @param data
- */
- handleSocks4FinalHandshakeResponse() {
- const data = this._receiveBuffer.get(8);
- if (data[1] !== constants_1.Socks4Response.Granted) {
- this._closeSocket(`${constants_1.ERRORS.Socks4ProxyRejectedConnection} - (${constants_1.Socks4Response[data[1]]})`);
- }
- else {
- // Bind response
- if (constants_1.SocksCommand[this._options.command] === constants_1.SocksCommand.bind) {
- const buff = smart_buffer_1.SmartBuffer.fromBuffer(data);
- buff.readOffset = 2;
- const remoteHost = {
- port: buff.readUInt16BE(),
- host: ip.fromLong(buff.readUInt32BE())
- };
- // If host is 0.0.0.0, set to proxy host.
- if (remoteHost.host === '0.0.0.0') {
- remoteHost.host = this._options.proxy.ipaddress;
- }
- this.state = constants_1.SocksClientState.BoundWaitingForConnection;
- this.emit('bound', { socket: this._socket, remoteHost });
- // Connect response
+}
+
+function removeCommonRoots(stacks) {
+ var current = stacks[0];
+ for (var i = 1; i < stacks.length; ++i) {
+ var prev = stacks[i];
+ var currentLastIndex = current.length - 1;
+ var currentLastLine = current[currentLastIndex];
+ var commonRootMeetPoint = -1;
+
+ for (var j = prev.length - 1; j >= 0; --j) {
+ if (prev[j] === currentLastLine) {
+ commonRootMeetPoint = j;
+ break;
}
- else {
- this.state = constants_1.SocksClientState.Established;
- this.removeInternalSocketHandlers();
- this.emit('established', { socket: this._socket });
+ }
+
+ for (var j = commonRootMeetPoint; j >= 0; --j) {
+ var line = prev[j];
+ if (current[currentLastIndex] === line) {
+ current.pop();
+ currentLastIndex--;
+ } else {
+ break;
}
}
+ current = prev;
}
- /**
- * Handles Socks v4 incoming connection request (BIND)
- * @param data
- */
- handleSocks4IncomingConnectionResponse() {
- const data = this._receiveBuffer.get(8);
- if (data[1] !== constants_1.Socks4Response.Granted) {
- this._closeSocket(`${constants_1.ERRORS.Socks4ProxyRejectedIncomingBoundConnection} - (${constants_1.Socks4Response[data[1]]})`);
- }
- else {
- const buff = smart_buffer_1.SmartBuffer.fromBuffer(data);
- buff.readOffset = 2;
- const remoteHost = {
- port: buff.readUInt16BE(),
- host: ip.fromLong(buff.readUInt32BE())
- };
- this.state = constants_1.SocksClientState.Established;
- this.removeInternalSocketHandlers();
- this.emit('established', { socket: this._socket, remoteHost });
+}
+
+function cleanStack(stack) {
+ var ret = [];
+ for (var i = 0; i < stack.length; ++i) {
+ var line = stack[i];
+ var isTraceLine = " (No stack trace)" === line ||
+ stackFramePattern.test(line);
+ var isInternalFrame = isTraceLine && shouldIgnore(line);
+ if (isTraceLine && !isInternalFrame) {
+ if (indentStackFrames && line.charAt(0) !== " ") {
+ line = " " + line;
+ }
+ ret.push(line);
}
}
- /**
- * Sends initial Socks v5 handshake request.
- */
- sendSocks5InitialHandshake() {
- const buff = new smart_buffer_1.SmartBuffer();
- buff.writeUInt8(0x05);
- // We should only tell the proxy we support user/pass auth if auth info is actually provided.
- // Note: As of Tor v0.3.5.7+, if user/pass auth is an option from the client, by default it will always take priority.
- if (this._options.proxy.userId || this._options.proxy.password) {
- buff.writeUInt8(2);
- buff.writeUInt8(constants_1.Socks5Auth.NoAuth);
- buff.writeUInt8(constants_1.Socks5Auth.UserPass);
- }
- else {
- buff.writeUInt8(1);
- buff.writeUInt8(constants_1.Socks5Auth.NoAuth);
+ return ret;
+}
+
+function stackFramesAsArray(error) {
+ var stack = error.stack.replace(/\s+$/g, "").split("\n");
+ for (var i = 0; i < stack.length; ++i) {
+ var line = stack[i];
+ if (" (No stack trace)" === line || stackFramePattern.test(line)) {
+ break;
}
- this._nextRequiredPacketBufferSize =
- constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5InitialHandshakeResponse;
- this._socket.write(buff.toBuffer());
- this.state = constants_1.SocksClientState.SentInitialHandshake;
}
- /**
- * Handles initial Socks v5 handshake response.
- * @param data
- */
- handleInitialSocks5HandshakeResponse() {
- const data = this._receiveBuffer.get(2);
- if (data[0] !== 0x05) {
- this._closeSocket(constants_1.ERRORS.InvalidSocks5IntiailHandshakeSocksVersion);
- }
- else if (data[1] === 0xff) {
- this._closeSocket(constants_1.ERRORS.InvalidSocks5InitialHandshakeNoAcceptedAuthType);
- }
- else {
- // If selected Socks v5 auth method is no auth, send final handshake request.
- if (data[1] === constants_1.Socks5Auth.NoAuth) {
- this.sendSocks5CommandRequest();
- // If selected Socks v5 auth method is user/password, send auth handshake.
- }
- else if (data[1] === constants_1.Socks5Auth.UserPass) {
- this.sendSocks5UserPassAuthentication();
- }
- else {
- this._closeSocket(constants_1.ERRORS.InvalidSocks5InitialHandshakeUnknownAuthType);
- }
- }
- }
- /**
- * Sends Socks v5 user & password auth handshake.
- *
- * Note: No auth and user/pass are currently supported.
- */
- sendSocks5UserPassAuthentication() {
- const userId = this._options.proxy.userId || '';
- const password = this._options.proxy.password || '';
- const buff = new smart_buffer_1.SmartBuffer();
- buff.writeUInt8(0x01);
- buff.writeUInt8(Buffer.byteLength(userId));
- buff.writeString(userId);
- buff.writeUInt8(Buffer.byteLength(password));
- buff.writeString(password);
- this._nextRequiredPacketBufferSize =
- constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5UserPassAuthenticationResponse;
- this._socket.write(buff.toBuffer());
- this.state = constants_1.SocksClientState.SentAuthentication;
- }
- /**
- * Handles Socks v5 auth handshake response.
- * @param data
- */
- handleInitialSocks5AuthenticationHandshakeResponse() {
- this.state = constants_1.SocksClientState.ReceivedAuthenticationResponse;
- const data = this._receiveBuffer.get(2);
- if (data[1] !== 0x00) {
- this._closeSocket(constants_1.ERRORS.Socks5AuthenticationFailed);
- }
- else {
- this.sendSocks5CommandRequest();
- }
+ if (i > 0 && error.name != "SyntaxError") {
+ stack = stack.slice(i);
}
- /**
- * Sends Socks v5 final handshake request.
- */
- sendSocks5CommandRequest() {
- const buff = new smart_buffer_1.SmartBuffer();
- buff.writeUInt8(0x05);
- buff.writeUInt8(constants_1.SocksCommand[this._options.command]);
- buff.writeUInt8(0x00);
- // ipv4, ipv6, domain?
- if (net.isIPv4(this._options.destination.host)) {
- buff.writeUInt8(constants_1.Socks5HostType.IPv4);
- buff.writeBuffer(ip.toBuffer(this._options.destination.host));
- }
- else if (net.isIPv6(this._options.destination.host)) {
- buff.writeUInt8(constants_1.Socks5HostType.IPv6);
- buff.writeBuffer(ip.toBuffer(this._options.destination.host));
+ return stack;
+}
+
+function parseStackAndMessage(error) {
+ var stack = error.stack;
+ var message = error.toString();
+ stack = typeof stack === "string" && stack.length > 0
+ ? stackFramesAsArray(error) : [" (No stack trace)"];
+ return {
+ message: message,
+ stack: error.name == "SyntaxError" ? stack : cleanStack(stack)
+ };
+}
+
+function formatAndLogError(error, title, isSoft) {
+ if (typeof console !== "undefined") {
+ var message;
+ if (util.isObject(error)) {
+ var stack = error.stack;
+ message = title + formatStack(stack, error);
+ } else {
+ message = title + String(error);
}
- else {
- buff.writeUInt8(constants_1.Socks5HostType.Hostname);
- buff.writeUInt8(this._options.destination.host.length);
- buff.writeString(this._options.destination.host);
+ if (typeof printWarning === "function") {
+ printWarning(message, isSoft);
+ } else if (typeof console.log === "function" ||
+ typeof console.log === "object") {
+ console.log(message);
}
- buff.writeUInt16BE(this._options.destination.port);
- this._nextRequiredPacketBufferSize =
- constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHeader;
- this._socket.write(buff.toBuffer());
- this.state = constants_1.SocksClientState.SentFinalHandshake;
}
- /**
- * Handles Socks v5 final handshake response.
- * @param data
- */
- handleSocks5FinalHandshakeResponse() {
- // Peek at available data (we need at least 5 bytes to get the hostname length)
- const header = this._receiveBuffer.peek(5);
- if (header[0] !== 0x05 || header[1] !== constants_1.Socks5Response.Granted) {
- this._closeSocket(`${constants_1.ERRORS.InvalidSocks5FinalHandshakeRejected} - ${constants_1.Socks5Response[header[1]]}`);
- }
- else {
- // Read address type
- const addressType = header[3];
- let remoteHost;
- let buff;
- // IPv4
- if (addressType === constants_1.Socks5HostType.IPv4) {
- // Check if data is available.
- const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv4;
- if (this._receiveBuffer.length < dataNeeded) {
- this._nextRequiredPacketBufferSize = dataNeeded;
- return;
- }
- buff = smart_buffer_1.SmartBuffer.fromBuffer(this._receiveBuffer.get(dataNeeded).slice(4));
- remoteHost = {
- host: ip.fromLong(buff.readUInt32BE()),
- port: buff.readUInt16BE()
- };
- // If given host is 0.0.0.0, assume remote proxy ip instead.
- if (remoteHost.host === '0.0.0.0') {
- remoteHost.host = this._options.proxy.ipaddress;
- }
- // Hostname
- }
- else if (addressType === constants_1.Socks5HostType.Hostname) {
- const hostLength = header[4];
- const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHostname(hostLength); // header + host length + host + port
- // Check if data is available.
- if (this._receiveBuffer.length < dataNeeded) {
- this._nextRequiredPacketBufferSize = dataNeeded;
- return;
- }
- buff = smart_buffer_1.SmartBuffer.fromBuffer(this._receiveBuffer.get(dataNeeded).slice(5) // Slice at 5 to skip host length
- );
- remoteHost = {
- host: buff.readString(hostLength),
- port: buff.readUInt16BE()
- };
- // IPv6
- }
- else if (addressType === constants_1.Socks5HostType.IPv6) {
- // Check if data is available.
- const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv6;
- if (this._receiveBuffer.length < dataNeeded) {
- this._nextRequiredPacketBufferSize = dataNeeded;
- return;
- }
- buff = smart_buffer_1.SmartBuffer.fromBuffer(this._receiveBuffer.get(dataNeeded).slice(4));
- remoteHost = {
- host: ip.toString(buff.readBuffer(16)),
- port: buff.readUInt16BE()
- };
- }
- // We have everything we need
- this.state = constants_1.SocksClientState.ReceivedFinalResponse;
- // If using CONNECT, the client is now in the established state.
- if (constants_1.SocksCommand[this._options.command] === constants_1.SocksCommand.connect) {
- this.state = constants_1.SocksClientState.Established;
- this.removeInternalSocketHandlers();
- this.emit('established', { socket: this._socket });
- }
- else if (constants_1.SocksCommand[this._options.command] === constants_1.SocksCommand.bind) {
- /* If using BIND, the Socks client is now in BoundWaitingForConnection state.
- This means that the remote proxy server is waiting for a remote connection to the bound port. */
- this.state = constants_1.SocksClientState.BoundWaitingForConnection;
- this._nextRequiredPacketBufferSize =
- constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHeader;
- this.emit('bound', { socket: this._socket, remoteHost });
- /*
- If using Associate, the Socks client is now Established. And the proxy server is now accepting UDP packets at the
- given bound port. This initial Socks TCP connection must remain open for the UDP relay to continue to work.
- */
- }
- else if (constants_1.SocksCommand[this._options.command] === constants_1.SocksCommand.associate) {
- this.state = constants_1.SocksClientState.Established;
- this.removeInternalSocketHandlers();
- this.emit('established', { socket: this._socket, remoteHost });
+}
+
+function fireRejectionEvent(name, localHandler, reason, promise) {
+ var localEventFired = false;
+ try {
+ if (typeof localHandler === "function") {
+ localEventFired = true;
+ if (name === "rejectionHandled") {
+ localHandler(promise);
+ } else {
+ localHandler(reason, promise);
}
}
+ } catch (e) {
+ async.throwLater(e);
}
- /**
- * Handles Socks v5 incoming connection request (BIND).
- */
- handleSocks5IncomingConnectionResponse() {
- // Peek at available data (we need at least 5 bytes to get the hostname length)
- const header = this._receiveBuffer.peek(5);
- if (header[0] !== 0x05 || header[1] !== constants_1.Socks5Response.Granted) {
- this._closeSocket(`${constants_1.ERRORS.Socks5ProxyRejectedIncomingBoundConnection} - ${constants_1.Socks5Response[header[1]]}`);
+
+ if (name === "unhandledRejection") {
+ if (!activeFireEvent(name, reason, promise) && !localEventFired) {
+ formatAndLogError(reason, "Unhandled rejection ");
}
- else {
- // Read address type
- const addressType = header[3];
- let remoteHost;
- let buff;
- // IPv4
- if (addressType === constants_1.Socks5HostType.IPv4) {
- // Check if data is available.
- const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv4;
- if (this._receiveBuffer.length < dataNeeded) {
- this._nextRequiredPacketBufferSize = dataNeeded;
- return;
- }
- buff = smart_buffer_1.SmartBuffer.fromBuffer(this._receiveBuffer.get(dataNeeded).slice(4));
- remoteHost = {
- host: ip.fromLong(buff.readUInt32BE()),
- port: buff.readUInt16BE()
- };
- // If given host is 0.0.0.0, assume remote proxy ip instead.
- if (remoteHost.host === '0.0.0.0') {
- remoteHost.host = this._options.proxy.ipaddress;
- }
- // Hostname
- }
- else if (addressType === constants_1.Socks5HostType.Hostname) {
- const hostLength = header[4];
- const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHostname(hostLength); // header + host length + port
- // Check if data is available.
- if (this._receiveBuffer.length < dataNeeded) {
- this._nextRequiredPacketBufferSize = dataNeeded;
- return;
- }
- buff = smart_buffer_1.SmartBuffer.fromBuffer(this._receiveBuffer.get(dataNeeded).slice(5) // Slice at 5 to skip host length
- );
- remoteHost = {
- host: buff.readString(hostLength),
- port: buff.readUInt16BE()
- };
- // IPv6
+ } else {
+ activeFireEvent(name, promise);
+ }
+}
+
+function formatNonError(obj) {
+ var str;
+ if (typeof obj === "function") {
+ str = "[function " +
+ (obj.name || "anonymous") +
+ "]";
+ } else {
+ str = obj && typeof obj.toString === "function"
+ ? obj.toString() : util.toString(obj);
+ var ruselessToString = /\[object [a-zA-Z0-9$_]+\]/;
+ if (ruselessToString.test(str)) {
+ try {
+ var newStr = JSON.stringify(obj);
+ str = newStr;
}
- else if (addressType === constants_1.Socks5HostType.IPv6) {
- // Check if data is available.
- const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv6;
- if (this._receiveBuffer.length < dataNeeded) {
- this._nextRequiredPacketBufferSize = dataNeeded;
- return;
- }
- buff = smart_buffer_1.SmartBuffer.fromBuffer(this._receiveBuffer.get(dataNeeded).slice(4));
- remoteHost = {
- host: ip.toString(buff.readBuffer(16)),
- port: buff.readUInt16BE()
- };
+ catch(e) {
+
}
- this.state = constants_1.SocksClientState.Established;
- this.removeInternalSocketHandlers();
- this.emit('established', { socket: this._socket, remoteHost });
+ }
+ if (str.length === 0) {
+ str = "(empty array)";
}
}
- get socksClientOptions() {
- return Object.assign({}, this._options);
- }
+ return ("(<" + snip(str) + ">, no stack trace)");
}
-exports.SocksClient = SocksClient;
-//# sourceMappingURL=socksclient.js.map
-
-/***/ }),
-/* 360 */,
-/* 361 */,
-/* 362 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-var util = __webpack_require__(669)
-var messages = __webpack_require__(132)
+function snip(str) {
+ var maxChars = 41;
+ if (str.length < maxChars) {
+ return str;
+ }
+ return str.substr(0, maxChars - 3) + "...";
+}
-module.exports = function() {
- var args = Array.prototype.slice.call(arguments, 0)
- var warningName = args.shift()
- if (warningName == "typo") {
- return makeTypoWarning.apply(null,args)
- }
- else {
- var msgTemplate = messages[warningName] ? messages[warningName] : warningName + ": '%s'"
- args.unshift(msgTemplate)
- return util.format.apply(null, args)
- }
+function longStackTracesIsSupported() {
+ return typeof captureStackTrace === "function";
}
-function makeTypoWarning (providedName, probableName, field) {
- if (field) {
- providedName = field + "['" + providedName + "']"
- probableName = field + "['" + probableName + "']"
- }
- return util.format(messages.typo, providedName, probableName)
+var shouldIgnore = function() { return false; };
+var parseLineInfoRegex = /[\/<\(]([^:\/]+):(\d+):(?:\d+)\)?\s*$/;
+function parseLineInfo(line) {
+ var matches = line.match(parseLineInfoRegex);
+ if (matches) {
+ return {
+ fileName: matches[1],
+ line: parseInt(matches[2], 10)
+ };
+ }
}
+function setBounds(firstLineError, lastLineError) {
+ if (!longStackTracesIsSupported()) return;
+ var firstStackLines = (firstLineError.stack || "").split("\n");
+ var lastStackLines = (lastLineError.stack || "").split("\n");
+ var firstIndex = -1;
+ var lastIndex = -1;
+ var firstFileName;
+ var lastFileName;
+ for (var i = 0; i < firstStackLines.length; ++i) {
+ var result = parseLineInfo(firstStackLines[i]);
+ if (result) {
+ firstFileName = result.fileName;
+ firstIndex = result.line;
+ break;
+ }
+ }
+ for (var i = 0; i < lastStackLines.length; ++i) {
+ var result = parseLineInfo(lastStackLines[i]);
+ if (result) {
+ lastFileName = result.fileName;
+ lastIndex = result.line;
+ break;
+ }
+ }
+ if (firstIndex < 0 || lastIndex < 0 || !firstFileName || !lastFileName ||
+ firstFileName !== lastFileName || firstIndex >= lastIndex) {
+ return;
+ }
-/***/ }),
-/* 363 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ shouldIgnore = function(line) {
+ if (bluebirdFramePattern.test(line)) return true;
+ var info = parseLineInfo(line);
+ if (info) {
+ if (info.fileName === firstFileName &&
+ (firstIndex <= info.line && info.line <= lastIndex)) {
+ return true;
+ }
+ }
+ return false;
+ };
+}
-"use strict";
+function CapturedTrace(parent) {
+ this._parent = parent;
+ this._promisesCreated = 0;
+ var length = this._length = 1 + (parent === undefined ? 0 : parent._length);
+ captureStackTrace(this, CapturedTrace);
+ if (length > 32) this.uncycle();
+}
+util.inherits(CapturedTrace, Error);
+Context.CapturedTrace = CapturedTrace;
-module.exports = function(Promise,
- PromiseArray,
- apiRejection,
- tryConvertToPromise,
- INTERNAL,
- debug) {
-var util = __webpack_require__(248);
-var tryCatch = util.tryCatch;
+CapturedTrace.prototype.uncycle = function() {
+ var length = this._length;
+ if (length < 2) return;
+ var nodes = [];
+ var stackToIndex = {};
-function ReductionPromiseArray(promises, fn, initialValue, _each) {
- this.constructor$(promises);
- var context = Promise._getContext();
- this._fn = util.contextBind(context, fn);
- if (initialValue !== undefined) {
- initialValue = Promise.resolve(initialValue);
- initialValue._attachCancellationCallback(this);
+ for (var i = 0, node = this; node !== undefined; ++i) {
+ nodes.push(node);
+ node = node._parent;
}
- this._initialValue = initialValue;
- this._currentCancellable = null;
- if(_each === INTERNAL) {
- this._eachValues = Array(this._length);
- } else if (_each === 0) {
- this._eachValues = null;
- } else {
- this._eachValues = undefined;
+ length = this._length = i;
+ for (var i = length - 1; i >= 0; --i) {
+ var stack = nodes[i].stack;
+ if (stackToIndex[stack] === undefined) {
+ stackToIndex[stack] = i;
+ }
}
- this._promise._captureStackTrace();
- this._init$(undefined, -5);
-}
-util.inherits(ReductionPromiseArray, PromiseArray);
+ for (var i = 0; i < length; ++i) {
+ var currentStack = nodes[i].stack;
+ var index = stackToIndex[currentStack];
+ if (index !== undefined && index !== i) {
+ if (index > 0) {
+ nodes[index - 1]._parent = undefined;
+ nodes[index - 1]._length = 1;
+ }
+ nodes[i]._parent = undefined;
+ nodes[i]._length = 1;
+ var cycleEdgeNode = i > 0 ? nodes[i - 1] : this;
-ReductionPromiseArray.prototype._gotAccum = function(accum) {
- if (this._eachValues !== undefined &&
- this._eachValues !== null &&
- accum !== INTERNAL) {
- this._eachValues.push(accum);
+ if (index < length - 1) {
+ cycleEdgeNode._parent = nodes[index + 1];
+ cycleEdgeNode._parent.uncycle();
+ cycleEdgeNode._length =
+ cycleEdgeNode._parent._length + 1;
+ } else {
+ cycleEdgeNode._parent = undefined;
+ cycleEdgeNode._length = 1;
+ }
+ var currentChildLength = cycleEdgeNode._length + 1;
+ for (var j = i - 2; j >= 0; --j) {
+ nodes[j]._length = currentChildLength;
+ currentChildLength++;
+ }
+ return;
+ }
}
};
-ReductionPromiseArray.prototype._eachComplete = function(value) {
- if (this._eachValues !== null) {
- this._eachValues.push(value);
+CapturedTrace.prototype.attachExtraTrace = function(error) {
+ if (error.__stackCleaned__) return;
+ this.uncycle();
+ var parsed = parseStackAndMessage(error);
+ var message = parsed.message;
+ var stacks = [parsed.stack];
+
+ var trace = this;
+ while (trace !== undefined) {
+ stacks.push(cleanStack(trace.stack.split("\n")));
+ trace = trace._parent;
}
- return this._eachValues;
+ removeCommonRoots(stacks);
+ removeDuplicateOrEmptyJumps(stacks);
+ util.notEnumerableProp(error, "stack", reconstructStack(message, stacks));
+ util.notEnumerableProp(error, "__stackCleaned__", true);
};
-ReductionPromiseArray.prototype._init = function() {};
+var captureStackTrace = (function stackDetection() {
+ var v8stackFramePattern = /^\s*at\s*/;
+ var v8stackFormatter = function(stack, error) {
+ if (typeof stack === "string") return stack;
-ReductionPromiseArray.prototype._resolveEmptyArray = function() {
- this._resolve(this._eachValues !== undefined ? this._eachValues
- : this._initialValue);
-};
+ if (error.name !== undefined &&
+ error.message !== undefined) {
+ return error.toString();
+ }
+ return formatNonError(error);
+ };
-ReductionPromiseArray.prototype.shouldCopyValues = function () {
- return false;
-};
+ if (typeof Error.stackTraceLimit === "number" &&
+ typeof Error.captureStackTrace === "function") {
+ Error.stackTraceLimit += 6;
+ stackFramePattern = v8stackFramePattern;
+ formatStack = v8stackFormatter;
+ var captureStackTrace = Error.captureStackTrace;
-ReductionPromiseArray.prototype._resolve = function(value) {
- this._promise._resolveCallback(value);
- this._values = null;
-};
-
-ReductionPromiseArray.prototype._resultCancelled = function(sender) {
- if (sender === this._initialValue) return this._cancel();
- if (this._isResolved()) return;
- this._resultCancelled$();
- if (this._currentCancellable instanceof Promise) {
- this._currentCancellable.cancel();
- }
- if (this._initialValue instanceof Promise) {
- this._initialValue.cancel();
+ shouldIgnore = function(line) {
+ return bluebirdFramePattern.test(line);
+ };
+ return function(receiver, ignoreUntil) {
+ Error.stackTraceLimit += 6;
+ captureStackTrace(receiver, ignoreUntil);
+ Error.stackTraceLimit -= 6;
+ };
}
-};
+ var err = new Error();
-ReductionPromiseArray.prototype._iterate = function (values) {
- this._values = values;
- var value;
- var i;
- var length = values.length;
- if (this._initialValue !== undefined) {
- value = this._initialValue;
- i = 0;
- } else {
- value = Promise.resolve(values[0]);
- i = 1;
+ if (typeof err.stack === "string" &&
+ err.stack.split("\n")[0].indexOf("stackDetection@") >= 0) {
+ stackFramePattern = /@/;
+ formatStack = v8stackFormatter;
+ indentStackFrames = true;
+ return function captureStackTrace(o) {
+ o.stack = new Error().stack;
+ };
}
- this._currentCancellable = value;
-
- for (var j = i; j < length; ++j) {
- var maybePromise = values[j];
- if (maybePromise instanceof Promise) {
- maybePromise.suppressUnhandledRejections();
- }
+ var hasStackAfterThrow;
+ try { throw new Error(); }
+ catch(e) {
+ hasStackAfterThrow = ("stack" in e);
+ }
+ if (!("stack" in err) && hasStackAfterThrow &&
+ typeof Error.stackTraceLimit === "number") {
+ stackFramePattern = v8stackFramePattern;
+ formatStack = v8stackFormatter;
+ return function captureStackTrace(o) {
+ Error.stackTraceLimit += 6;
+ try { throw new Error(); }
+ catch(e) { o.stack = e.stack; }
+ Error.stackTraceLimit -= 6;
+ };
}
- if (!value.isRejected()) {
- for (; i < length; ++i) {
- var ctx = {
- accum: null,
- value: values[i],
- index: i,
- length: length,
- array: this
- };
-
- value = value._then(gotAccum, undefined, undefined, ctx, undefined);
+ formatStack = function(stack, error) {
+ if (typeof stack === "string") return stack;
- if ((i & 127) === 0) {
- value._setNoAsyncGuarantee();
- }
+ if ((typeof error === "object" ||
+ typeof error === "function") &&
+ error.name !== undefined &&
+ error.message !== undefined) {
+ return error.toString();
}
- }
-
- if (this._eachValues !== undefined) {
- value = value
- ._then(this._eachComplete, undefined, undefined, this, undefined);
- }
- value._then(completed, completed, undefined, value, this);
-};
+ return formatNonError(error);
+ };
-Promise.prototype.reduce = function (fn, initialValue) {
- return reduce(this, fn, initialValue, null);
-};
+ return null;
-Promise.reduce = function (promises, fn, initialValue, _each) {
- return reduce(promises, fn, initialValue, _each);
-};
+})([]);
-function completed(valueOrReason, array) {
- if (this.isFulfilled()) {
- array._resolve(valueOrReason);
- } else {
- array._reject(valueOrReason);
+if (typeof console !== "undefined" && typeof console.warn !== "undefined") {
+ printWarning = function (message) {
+ console.warn(message);
+ };
+ if (util.isNode && process.stderr.isTTY) {
+ printWarning = function(message, isSoft) {
+ var color = isSoft ? "\u001b[33m" : "\u001b[31m";
+ console.warn(color + message + "\u001b[0m\n");
+ };
+ } else if (!util.isNode && typeof (new Error().stack) === "string") {
+ printWarning = function(message, isSoft) {
+ console.warn("%c" + message,
+ isSoft ? "color: darkorange" : "color: red");
+ };
}
}
-function reduce(promises, fn, initialValue, _each) {
- if (typeof fn !== "function") {
- return apiRejection("expecting a function but got " + util.classString(fn));
- }
- var array = new ReductionPromiseArray(promises, fn, initialValue, _each);
- return array.promise();
-}
+var config = {
+ warnings: warnings,
+ longStackTraces: false,
+ cancellation: false,
+ monitoring: false,
+ asyncHooks: false
+};
-function gotAccum(accum) {
- this.accum = accum;
- this.array._gotAccum(accum);
- var value = tryConvertToPromise(this.value, this.array._promise);
- if (value instanceof Promise) {
- this.array._currentCancellable = value;
- return value._then(gotValue, undefined, undefined, this, undefined);
- } else {
- return gotValue.call(this, value);
- }
-}
+if (longStackTraces) Promise.longStackTraces();
-function gotValue(value) {
- var array = this.array;
- var promise = array._promise;
- var fn = tryCatch(array._fn);
- promise._pushContext();
- var ret;
- if (array._eachValues !== undefined) {
- ret = fn.call(promise._boundValue(), value, this.index, this.length);
- } else {
- ret = fn.call(promise._boundValue(),
- this.accum, value, this.index, this.length);
- }
- if (ret instanceof Promise) {
- array._currentCancellable = ret;
- }
- var promiseCreated = promise._popContext();
- debug.checkForgottenReturns(
- ret,
- promiseCreated,
- array._eachValues !== undefined ? "Promise.each" : "Promise.reduce",
- promise
- );
- return ret;
-}
+return {
+ asyncHooks: function() {
+ return config.asyncHooks;
+ },
+ longStackTraces: function() {
+ return config.longStackTraces;
+ },
+ warnings: function() {
+ return config.warnings;
+ },
+ cancellation: function() {
+ return config.cancellation;
+ },
+ monitoring: function() {
+ return config.monitoring;
+ },
+ propagateFromFunction: function() {
+ return propagateFromFunction;
+ },
+ boundValueFunction: function() {
+ return boundValueFunction;
+ },
+ checkForgottenReturns: checkForgottenReturns,
+ setBounds: setBounds,
+ warn: warn,
+ deprecated: deprecated,
+ CapturedTrace: CapturedTrace,
+ fireDomEvent: fireDomEvent,
+ fireGlobalEvent: fireGlobalEvent
+};
};
/***/ }),
-/* 364 */
-/***/ (function(module) {
+/* 273 */
+/***/ (function(module, exports, __webpack_require__) {
"use strict";
-module.exports = (flag, argv = process.argv) => {
- const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');
- const position = argv.indexOf(prefix + flag);
- const terminatorPosition = argv.indexOf('--');
- return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition);
-};
-
-
-/***/ }),
-/* 365 */,
-/* 366 */
-/***/ (function(module) {
+const path = __webpack_require__(622)
+const fs = __webpack_require__(598)
+const chain = __webpack_require__(433).chain
+const mkdir = __webpack_require__(836)
+const rm = __webpack_require__(974)
+const inferOwner = __webpack_require__(686)
+const chown = __webpack_require__(358)
-var twoify = function (n) {
- if (n && !(n & (n - 1))) return n
- var p = 1
- while (p < n) p <<= 1
- return p
+exports = module.exports = {
+ link: link,
+ linkIfExists: linkIfExists
}
-var Cyclist = function (size) {
- if (!(this instanceof Cyclist)) return new Cyclist(size)
- size = twoify(size)
- this.mask = size - 1
- this.size = size
- this.values = new Array(size)
+function linkIfExists (from, to, opts, cb) {
+ opts.currentIsLink = false
+ opts.currentExists = false
+ fs.stat(from, function (er) {
+ if (er) return cb()
+ fs.readlink(to, function (er, fromOnDisk) {
+ if (!er || er.code !== 'ENOENT') {
+ opts.currentExists = true
+ }
+ // if the link already exists and matches what we would do,
+ // we don't need to do anything
+ if (!er) {
+ opts.currentIsLink = true
+ var toDir = path.dirname(to)
+ var absoluteFrom = path.resolve(toDir, from)
+ var absoluteFromOnDisk = path.resolve(toDir, fromOnDisk)
+ opts.currentTarget = absoluteFromOnDisk
+ if (absoluteFrom === absoluteFromOnDisk) return cb()
+ }
+ link(from, to, opts, cb)
+ })
+ })
}
-Cyclist.prototype.put = function (index, val) {
- var pos = index & this.mask
- this.values[pos] = val
- return pos
+function resolveIfSymlink (maybeSymlinkPath, cb) {
+ fs.lstat(maybeSymlinkPath, function (err, stat) {
+ if (err) return cb.apply(this, arguments)
+ if (!stat.isSymbolicLink()) return cb(null, maybeSymlinkPath)
+ fs.readlink(maybeSymlinkPath, cb)
+ })
}
-Cyclist.prototype.get = function (index) {
- return this.values[index & this.mask]
+function ensureFromIsNotSource (from, to, cb) {
+ resolveIfSymlink(from, function (err, fromDestination) {
+ if (err) return cb.apply(this, arguments)
+ if (path.resolve(path.dirname(from), fromDestination) === path.resolve(to)) {
+ return cb(new Error('Link target resolves to the same directory as link source: ' + to))
+ }
+ cb.apply(this, arguments)
+ })
}
-Cyclist.prototype.del = function (index) {
- var pos = index & this.mask
- var val = this.values[pos]
- this.values[pos] = undefined
- return val
-}
+function link (from, to, opts, cb) {
+ to = path.resolve(to)
+ opts.base = path.dirname(to)
+ var absTarget = path.resolve(opts.base, from)
+ var relativeTarget = path.relative(opts.base, absTarget)
+ var target = opts.absolute ? absTarget : relativeTarget
-module.exports = Cyclist
+ const tasks = [
+ [ensureFromIsNotSource, absTarget, to],
+ [fs, 'stat', absTarget],
+ [clobberLinkGently, from, to, opts],
+ [mkdir, path.dirname(to)],
+ [fs, 'symlink', target, to, 'junction']
+ ]
+ if (chown.selfOwner.uid !== 0) {
+ chain(tasks, cb)
+ } else {
+ inferOwner(to).then(owner => {
+ tasks.push([chown, to, owner.uid, owner.gid])
+ chain(tasks, cb)
+ })
+ }
+}
-/***/ }),
-/* 367 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+exports._clobberLinkGently = clobberLinkGently
+function clobberLinkGently (from, to, opts, cb) {
+ if (opts.currentExists === false) {
+ // nothing to clobber!
+ opts.log.silly('gently link', 'link does not already exist', {
+ link: to,
+ target: from
+ })
+ return cb()
+ }
-"use strict";
+ if (!opts.clobberLinkGently ||
+ opts.force === true ||
+ !opts.gently ||
+ typeof opts.gently !== 'string') {
+ opts.log.silly('gently link', 'deleting existing link forcefully', {
+ link: to,
+ target: from,
+ force: opts.force,
+ gently: opts.gently,
+ clobberLinkGently: opts.clobberLinkGently
+ })
+ return rm(to, opts, cb)
+ }
+ if (!opts.currentIsLink) {
+ opts.log.verbose('gently link', 'cannot remove, not a link', to)
+ // don't delete. it'll fail with EEXIST when it tries to symlink.
+ return cb()
+ }
-module.exports = __webpack_require__(100)
+ if (opts.currentTarget.indexOf(opts.gently) === 0) {
+ opts.log.silly('gently link', 'delete existing link', to)
+ return rm(to, opts, cb)
+ } else {
+ opts.log.verbose('gently link', 'refusing to delete existing link', {
+ link: to,
+ currentTarget: opts.currentTarget,
+ newTarget: from,
+ gently: opts.gently
+ })
+ return cb()
+ }
+}
/***/ }),
-/* 368 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+/* 274 */,
+/* 275 */,
+/* 276 */
+/***/ (function(__unusedmodule, exports) {
"use strict";
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=Logger.js.map
-const figgyPudding = __webpack_require__(122)
-const npa = __webpack_require__(482)
-const npmFetch = __webpack_require__(789)
-const semver = __webpack_require__(280)
-const url = __webpack_require__(835)
-
-const UnpublishConfig = figgyPudding({
- force: { default: false },
- Promise: { default: () => Promise }
-})
+/***/ }),
+/* 277 */,
+/* 278 */
+/***/ (function(__unusedmodule, exports) {
-module.exports = unpublish
-function unpublish (spec, opts) {
- opts = UnpublishConfig(opts)
- return new opts.Promise(resolve => resolve()).then(() => {
- spec = npa(spec)
- // NOTE: spec is used to pick the appropriate registry/auth combo.
- opts = opts.concat({ spec })
- const pkgUri = spec.escapedName
- return npmFetch.json(pkgUri, opts.concat({
- query: { write: true }
- })).then(pkg => {
- if (!spec.rawSpec || spec.rawSpec === '*') {
- return npmFetch(`${pkgUri}/-rev/${pkg._rev}`, opts.concat({
- method: 'DELETE',
- ignoreBody: true
- }))
- } else {
- const version = spec.rawSpec
- const allVersions = pkg.versions || {}
- const versionPublic = allVersions[version]
- let dist
- if (versionPublic) {
- dist = allVersions[version].dist
- }
- delete allVersions[version]
- // if it was the only version, then delete the whole package.
- if (!Object.keys(allVersions).length) {
- return npmFetch(`${pkgUri}/-rev/${pkg._rev}`, opts.concat({
- method: 'DELETE',
- ignoreBody: true
- }))
- } else if (versionPublic) {
- const latestVer = pkg['dist-tags'].latest
- Object.keys(pkg['dist-tags']).forEach(tag => {
- if (pkg['dist-tags'][tag] === version) {
- delete pkg['dist-tags'][tag]
- }
- })
+"use strict";
- if (latestVer === version) {
- pkg['dist-tags'].latest = Object.keys(
- allVersions
- ).sort(semver.compareLoose).pop()
- }
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=CorrelationContext.js.map
- delete pkg._revisions
- delete pkg._attachments
- // Update packument with removed versions
- return npmFetch(`${pkgUri}/-rev/${pkg._rev}`, opts.concat({
- method: 'PUT',
- body: pkg,
- ignoreBody: true
- })).then(() => {
- // Remove the tarball itself
- return npmFetch.json(pkgUri, opts.concat({
- query: { write: true }
- })).then(({ _rev, _id }) => {
- const tarballUrl = url.parse(dist.tarball).pathname.substr(1)
- return npmFetch(`${tarballUrl}/-rev/${_rev}`, opts.concat({
- method: 'DELETE',
- ignoreBody: true
- }))
- })
- })
- }
- }
- }, err => {
- if (err.code !== 'E404') {
- throw err
- }
- })
- }).then(() => true)
-}
+/***/ }),
+/* 279 */
+/***/ (function(module) {
+"use strict";
-/***/ }),
-/* 369 */,
-/* 370 */,
-/* 371 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-module.exports.pipe = __webpack_require__(284)
-module.exports.each = __webpack_require__(137)
-module.exports.pipeline = __webpack_require__(746)
-module.exports.duplex = __webpack_require__(394)
-module.exports.through = __webpack_require__(576)
-module.exports.concat = __webpack_require__(596)
-module.exports.finished = __webpack_require__(3)
-module.exports.from = __webpack_require__(868)
-module.exports.to = __webpack_require__(6)
-module.exports.parallel = __webpack_require__(565)
+module.exports = cacheKey
+function cacheKey (type, identifier) {
+ return ['pacote', type, identifier].join(':')
+}
/***/ }),
-/* 372 */
+/* 280 */
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
-const util_1 = __webpack_require__(526);
-const constants_1 = __webpack_require__(206);
-const stream = __webpack_require__(794);
+const buffer_1 = __webpack_require__(293);
/**
- * Validates the provided SocksClientOptions
- * @param options { SocksClientOptions }
- * @param acceptedCommands { string[] } A list of accepted SocksProxy commands.
+ * Error strings
*/
-function validateSocksClientOptions(options, acceptedCommands = ['connect', 'bind', 'associate']) {
- // Check SOCKs command option.
- if (!constants_1.SocksCommand[options.command]) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksCommand, options);
- }
- // Check SocksCommand for acceptable command.
- if (acceptedCommands.indexOf(options.command) === -1) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksCommandForOperation, options);
- }
- // Check destination
- if (!isValidSocksRemoteHost(options.destination)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsDestination, options);
- }
- // Check SOCKS proxy to use
- if (!isValidSocksProxy(options.proxy)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxy, options);
- }
- // Check timeout
- if (options.timeout && !isValidTimeoutValue(options.timeout)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsTimeout, options);
- }
- // Check existing_socket (if provided)
- if (options.existing_socket &&
- !(options.existing_socket instanceof stream.Duplex)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsExistingSocket, options);
+const ERRORS = {
+ INVALID_ENCODING: 'Invalid encoding provided. Please specify a valid encoding the internal Node.js Buffer supports.',
+ INVALID_SMARTBUFFER_SIZE: 'Invalid size provided. Size must be a valid integer greater than zero.',
+ INVALID_SMARTBUFFER_BUFFER: 'Invalid Buffer provided in SmartBufferOptions.',
+ INVALID_SMARTBUFFER_OBJECT: 'Invalid SmartBufferOptions object supplied to SmartBuffer constructor or factory methods.',
+ INVALID_OFFSET: 'An invalid offset value was provided.',
+ INVALID_OFFSET_NON_NUMBER: 'An invalid offset value was provided. A numeric value is required.',
+ INVALID_LENGTH: 'An invalid length value was provided.',
+ INVALID_LENGTH_NON_NUMBER: 'An invalid length value was provived. A numeric value is required.',
+ INVALID_TARGET_OFFSET: 'Target offset is beyond the bounds of the internal SmartBuffer data.',
+ INVALID_TARGET_LENGTH: 'Specified length value moves cursor beyong the bounds of the internal SmartBuffer data.',
+ INVALID_READ_BEYOND_BOUNDS: 'Attempted to read beyond the bounds of the managed data.',
+ INVALID_WRITE_BEYOND_BOUNDS: 'Attempted to write beyond the bounds of the managed data.'
+};
+exports.ERRORS = ERRORS;
+/**
+ * Checks if a given encoding is a valid Buffer encoding. (Throws an exception if check fails)
+ *
+ * @param { String } encoding The encoding string to check.
+ */
+function checkEncoding(encoding) {
+ if (!buffer_1.Buffer.isEncoding(encoding)) {
+ throw new Error(ERRORS.INVALID_ENCODING);
}
}
-exports.validateSocksClientOptions = validateSocksClientOptions;
+exports.checkEncoding = checkEncoding;
/**
- * Validates the SocksClientChainOptions
- * @param options { SocksClientChainOptions }
+ * Checks if a given number is a finite integer. (Throws an exception if check fails)
+ *
+ * @param { Number } value The number value to check.
*/
-function validateSocksClientChainOptions(options) {
- // Only connect is supported when chaining.
- if (options.command !== 'connect') {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksCommandChain, options);
- }
- // Check destination
- if (!isValidSocksRemoteHost(options.destination)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsDestination, options);
- }
- // Validate proxies (length)
- if (!(options.proxies &&
- Array.isArray(options.proxies) &&
- options.proxies.length >= 2)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxiesLength, options);
- }
- // Validate proxies
- options.proxies.forEach((proxy) => {
- if (!isValidSocksProxy(proxy)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxy, options);
+function isFiniteInteger(value) {
+ return typeof value === 'number' && isFinite(value) && isInteger(value);
+}
+exports.isFiniteInteger = isFiniteInteger;
+/**
+ * Checks if an offset/length value is valid. (Throws an exception if check fails)
+ *
+ * @param value The value to check.
+ * @param offset True if checking an offset, false if checking a length.
+ */
+function checkOffsetOrLengthValue(value, offset) {
+ if (typeof value === 'number') {
+ // Check for non finite/non integers
+ if (!isFiniteInteger(value) || value < 0) {
+ throw new Error(offset ? ERRORS.INVALID_OFFSET : ERRORS.INVALID_LENGTH);
}
- });
- // Check timeout
- if (options.timeout && !isValidTimeoutValue(options.timeout)) {
- throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsTimeout, options);
+ }
+ else {
+ throw new Error(offset ? ERRORS.INVALID_OFFSET_NON_NUMBER : ERRORS.INVALID_LENGTH_NON_NUMBER);
}
}
-exports.validateSocksClientChainOptions = validateSocksClientChainOptions;
/**
- * Validates a SocksRemoteHost
- * @param remoteHost { SocksRemoteHost }
+ * Checks if a length value is valid. (Throws an exception if check fails)
+ *
+ * @param { Number } length The value to check.
*/
-function isValidSocksRemoteHost(remoteHost) {
- return (remoteHost &&
- typeof remoteHost.host === 'string' &&
- typeof remoteHost.port === 'number' &&
- remoteHost.port >= 0 &&
- remoteHost.port <= 65535);
+function checkLengthValue(length) {
+ checkOffsetOrLengthValue(length, false);
}
+exports.checkLengthValue = checkLengthValue;
/**
- * Validates a SocksProxy
- * @param proxy { SocksProxy }
+ * Checks if a offset value is valid. (Throws an exception if check fails)
+ *
+ * @param { Number } offset The value to check.
*/
-function isValidSocksProxy(proxy) {
- return (proxy &&
- (typeof proxy.host === 'string' || typeof proxy.ipaddress === 'string') &&
- typeof proxy.port === 'number' &&
- proxy.port >= 0 &&
- proxy.port <= 65535 &&
- (proxy.type === 4 || proxy.type === 5));
+function checkOffsetValue(offset) {
+ checkOffsetOrLengthValue(offset, true);
}
+exports.checkOffsetValue = checkOffsetValue;
/**
- * Validates a timeout value.
- * @param value { Number }
+ * Checks if a target offset value is out of bounds. (Throws an exception if check fails)
+ *
+ * @param { Number } offset The offset value to check.
+ * @param { SmartBuffer } buff The SmartBuffer instance to check against.
*/
-function isValidTimeoutValue(value) {
- return typeof value === 'number' && value > 0;
+function checkTargetOffset(offset, buff) {
+ if (offset < 0 || offset > buff.length) {
+ throw new Error(ERRORS.INVALID_TARGET_OFFSET);
+ }
}
-//# sourceMappingURL=helpers.js.map
+exports.checkTargetOffset = checkTargetOffset;
+/**
+ * Determines whether a given number is a integer.
+ * @param value The number to check.
+ */
+function isInteger(value) {
+ return typeof value === 'number' && isFinite(value) && Math.floor(value) === value;
+}
+/**
+ * Throws if Node.js version is too low to support bigint
+ */
+function bigIntAndBufferInt64Check(bufferMethod) {
+ if (typeof BigInt === 'undefined') {
+ throw new Error('Platform does not support JS BigInt type.');
+ }
+ if (typeof buffer_1.Buffer.prototype[bufferMethod] === 'undefined') {
+ throw new Error(`Platform does not support Buffer.prototype.${bufferMethod}.`);
+ }
+}
+exports.bigIntAndBufferInt64Check = bigIntAndBufferInt64Check;
+//# sourceMappingURL=utils.js.map
/***/ }),
-/* 373 */
+/* 281 */
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const internal_globber_1 = __webpack_require__(297);
+/**
+ * Constructs a globber
+ *
+ * @param patterns Patterns separated by newlines
+ * @param options Glob options
+ */
+function create(patterns, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return yield internal_globber_1.DefaultGlobber.create(patterns, options);
+ });
+}
+exports.create = create;
+//# sourceMappingURL=glob.js.map
-Object.defineProperty(exports, '__esModule', { value: true });
+/***/ }),
+/* 282 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
-var coreHttp = __webpack_require__(999);
-var tslib = __webpack_require__(815);
-var api = __webpack_require__(440);
-var logger$1 = __webpack_require__(492);
-var abortController = __webpack_require__(106);
-var os = __webpack_require__(87);
-var stream = __webpack_require__(794);
-__webpack_require__(242);
-var crypto = __webpack_require__(417);
-var coreLro = __webpack_require__(889);
-var events = __webpack_require__(614);
-var coreTracing = __webpack_require__(263);
-var fs = __webpack_require__(747);
-var util = __webpack_require__(669);
+"use strict";
-/*
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for license information.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
- */
-var KeyInfo = {
- serializedName: "KeyInfo",
- type: {
- name: "Composite",
- className: "KeyInfo",
- modelProperties: {
- startsOn: {
- xmlName: "Start",
- required: true,
- serializedName: "Start",
- type: {
- name: "String"
- }
- },
- expiresOn: {
- xmlName: "Expiry",
- required: true,
- serializedName: "Expiry",
- type: {
- name: "String"
- }
- }
- }
- }
-};
-var UserDelegationKey = {
- serializedName: "UserDelegationKey",
- type: {
- name: "Composite",
- className: "UserDelegationKey",
- modelProperties: {
- signedObjectId: {
- xmlName: "SignedOid",
- required: true,
- serializedName: "SignedOid",
- type: {
- name: "String"
- }
- },
- signedTenantId: {
- xmlName: "SignedTid",
- required: true,
- serializedName: "SignedTid",
- type: {
- name: "String"
- }
- },
- signedStartsOn: {
- xmlName: "SignedStart",
- required: true,
- serializedName: "SignedStart",
- type: {
- name: "String"
- }
- },
- signedExpiresOn: {
- xmlName: "SignedExpiry",
- required: true,
- serializedName: "SignedExpiry",
- type: {
- name: "String"
- }
- },
- signedService: {
- xmlName: "SignedService",
- required: true,
- serializedName: "SignedService",
- type: {
- name: "String"
- }
- },
- signedVersion: {
- xmlName: "SignedVersion",
- required: true,
- serializedName: "SignedVersion",
- type: {
- name: "String"
- }
- },
- value: {
- xmlName: "Value",
- required: true,
- serializedName: "Value",
- type: {
- name: "String"
- }
- }
- }
- }
-};
-var StorageError = {
- serializedName: "StorageError",
- type: {
- name: "Composite",
- className: "StorageError",
- modelProperties: {
- message: {
- xmlName: "Message",
- serializedName: "Message",
- type: {
- name: "String"
- }
- }
- }
+// wrapper around mkdirp for tar's needs.
+
+// TODO: This should probably be a class, not functionally
+// passing around state in a gazillion args.
+
+const mkdirp = __webpack_require__(626)
+const fs = __webpack_require__(747)
+const path = __webpack_require__(622)
+const chownr = __webpack_require__(941)
+
+class SymlinkError extends Error {
+ constructor (symlink, path) {
+ super('Cannot extract through symbolic link')
+ this.path = path
+ this.symlink = symlink
+ }
+
+ get name () {
+ return 'SylinkError'
+ }
+}
+
+class CwdError extends Error {
+ constructor (path, code) {
+ super(code + ': Cannot cd into \'' + path + '\'')
+ this.path = path
+ this.code = code
+ }
+
+ get name () {
+ return 'CwdError'
+ }
+}
+
+const mkdir = module.exports = (dir, opt, cb) => {
+ // if there's any overlap between mask and mode,
+ // then we'll need an explicit chmod
+ const umask = opt.umask
+ const mode = opt.mode | 0o0700
+ const needChmod = (mode & umask) !== 0
+
+ const uid = opt.uid
+ const gid = opt.gid
+ const doChown = typeof uid === 'number' &&
+ typeof gid === 'number' &&
+ ( uid !== opt.processUid || gid !== opt.processGid )
+
+ const preserve = opt.preserve
+ const unlink = opt.unlink
+ const cache = opt.cache
+ const cwd = opt.cwd
+
+ const done = (er, created) => {
+ if (er)
+ cb(er)
+ else {
+ cache.set(dir, true)
+ if (created && doChown)
+ chownr(created, uid, gid, er => done(er))
+ else if (needChmod)
+ fs.chmod(dir, mode, cb)
+ else
+ cb()
}
-};
-var DataLakeStorageErrorError = {
- serializedName: "DataLakeStorageError_error",
- type: {
- name: "Composite",
- className: "DataLakeStorageErrorError",
- modelProperties: {
- code: {
- xmlName: "Code",
- serializedName: "Code",
- type: {
- name: "String"
- }
- },
- message: {
- xmlName: "Message",
- serializedName: "Message",
- type: {
- name: "String"
- }
- }
- }
+ }
+
+ if (cache && cache.get(dir) === true)
+ return done()
+
+ if (dir === cwd)
+ return fs.stat(dir, (er, st) => {
+ if (er || !st.isDirectory())
+ er = new CwdError(dir, er && er.code || 'ENOTDIR')
+ done(er)
+ })
+
+ if (preserve)
+ return mkdirp(dir, mode, done)
+
+ const sub = path.relative(cwd, dir)
+ const parts = sub.split(/\/|\\/)
+ mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
+}
+
+const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
+ if (!parts.length)
+ return cb(null, created)
+ const p = parts.shift()
+ const part = base + '/' + p
+ if (cache.get(part))
+ return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
+}
+
+const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
+ if (er) {
+ if (er.path && path.dirname(er.path) === cwd &&
+ (er.code === 'ENOTDIR' || er.code === 'ENOENT'))
+ return cb(new CwdError(cwd, er.code))
+
+ fs.lstat(part, (statEr, st) => {
+ if (statEr)
+ cb(statEr)
+ else if (st.isDirectory())
+ mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ else if (unlink)
+ fs.unlink(part, er => {
+ if (er)
+ return cb(er)
+ fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
+ })
+ else if (st.isSymbolicLink())
+ return cb(new SymlinkError(part, part + '/' + parts.join('/')))
+ else
+ cb(er)
+ })
+ } else {
+ created = created || part
+ mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
+ }
+}
+
+const mkdirSync = module.exports.sync = (dir, opt) => {
+ // if there's any overlap between mask and mode,
+ // then we'll need an explicit chmod
+ const umask = opt.umask
+ const mode = opt.mode | 0o0700
+ const needChmod = (mode & umask) !== 0
+
+ const uid = opt.uid
+ const gid = opt.gid
+ const doChown = typeof uid === 'number' &&
+ typeof gid === 'number' &&
+ ( uid !== opt.processUid || gid !== opt.processGid )
+
+ const preserve = opt.preserve
+ const unlink = opt.unlink
+ const cache = opt.cache
+ const cwd = opt.cwd
+
+ const done = (created) => {
+ cache.set(dir, true)
+ if (created && doChown)
+ chownr.sync(created, uid, gid)
+ if (needChmod)
+ fs.chmodSync(dir, mode)
+ }
+
+ if (cache && cache.get(dir) === true)
+ return done()
+
+ if (dir === cwd) {
+ let ok = false
+ let code = 'ENOTDIR'
+ try {
+ ok = fs.statSync(dir).isDirectory()
+ } catch (er) {
+ code = er.code
+ } finally {
+ if (!ok)
+ throw new CwdError(dir, code)
}
-};
-var DataLakeStorageError = {
- serializedName: "DataLakeStorageError",
- type: {
- name: "Composite",
- className: "DataLakeStorageError",
- modelProperties: {
- dataLakeStorageErrorDetails: {
- xmlName: "error",
- serializedName: "error",
- type: {
- name: "Composite",
- className: "DataLakeStorageErrorError"
- }
- }
- }
+ done()
+ return
+ }
+
+ if (preserve)
+ return done(mkdirp.sync(dir, mode))
+
+ const sub = path.relative(cwd, dir)
+ const parts = sub.split(/\/|\\/)
+ let created = null
+ for (let p = parts.shift(), part = cwd;
+ p && (part += '/' + p);
+ p = parts.shift()) {
+
+ if (cache.get(part))
+ continue
+
+ try {
+ fs.mkdirSync(part, mode)
+ created = created || part
+ cache.set(part, true)
+ } catch (er) {
+ if (er.path && path.dirname(er.path) === cwd &&
+ (er.code === 'ENOTDIR' || er.code === 'ENOENT'))
+ return new CwdError(cwd, er.code)
+
+ const st = fs.lstatSync(part)
+ if (st.isDirectory()) {
+ cache.set(part, true)
+ continue
+ } else if (unlink) {
+ fs.unlinkSync(part)
+ fs.mkdirSync(part, mode)
+ created = created || part
+ cache.set(part, true)
+ continue
+ } else if (st.isSymbolicLink())
+ return new SymlinkError(part, part + '/' + parts.join('/'))
}
-};
-var AccessPolicy = {
- serializedName: "AccessPolicy",
- type: {
- name: "Composite",
- className: "AccessPolicy",
- modelProperties: {
- startsOn: {
- xmlName: "Start",
- serializedName: "Start",
- type: {
- name: "String"
- }
- },
- expiresOn: {
- xmlName: "Expiry",
- serializedName: "Expiry",
- type: {
- name: "String"
- }
- },
- permissions: {
- xmlName: "Permission",
- serializedName: "Permission",
- type: {
- name: "String"
- }
- }
- }
+ }
+
+ return done(created)
+}
+
+
+/***/ }),
+/* 283 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const compare = __webpack_require__(874)
+const compareLoose = (a, b) => compare(a, b, true)
+module.exports = compareLoose
+
+
+/***/ }),
+/* 284 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var once = __webpack_require__(49)
+var eos = __webpack_require__(3)
+var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes
+
+var noop = function () {}
+var ancient = /^v?\.0/.test(process.version)
+
+var isFn = function (fn) {
+ return typeof fn === 'function'
+}
+
+var isFS = function (stream) {
+ if (!ancient) return false // newer node version do not need to care about fs is a special way
+ if (!fs) return false // browser
+ return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close)
+}
+
+var isRequest = function (stream) {
+ return stream.setHeader && isFn(stream.abort)
+}
+
+var destroyer = function (stream, reading, writing, callback) {
+ callback = once(callback)
+
+ var closed = false
+ stream.on('close', function () {
+ closed = true
+ })
+
+ eos(stream, {readable: reading, writable: writing}, function (err) {
+ if (err) return callback(err)
+ closed = true
+ callback()
+ })
+
+ var destroyed = false
+ return function (err) {
+ if (closed) return
+ if (destroyed) return
+ destroyed = true
+
+ if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks
+ if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want
+
+ if (isFn(stream.destroy)) return stream.destroy()
+
+ callback(err || new Error('stream was destroyed'))
+ }
+}
+
+var call = function (fn) {
+ fn()
+}
+
+var pipe = function (from, to) {
+ return from.pipe(to)
+}
+
+var pump = function () {
+ var streams = Array.prototype.slice.call(arguments)
+ var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop
+
+ if (Array.isArray(streams[0])) streams = streams[0]
+ if (streams.length < 2) throw new Error('pump requires two streams per minimum')
+
+ var error
+ var destroys = streams.map(function (stream, i) {
+ var reading = i < streams.length - 1
+ var writing = i > 0
+ return destroyer(stream, reading, writing, function (err) {
+ if (!error) error = err
+ if (err) destroys.forEach(call)
+ if (reading) return
+ destroys.forEach(call)
+ callback(error)
+ })
+ })
+
+ return streams.reduce(pipe)
+}
+
+module.exports = pump
+
+
+/***/ }),
+/* 285 */
+/***/ (function(module) {
+
+"use strict";
+
+
+function isArguments (thingy) {
+ return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee')
+}
+
+var types = {
+ '*': {label: 'any', check: function () { return true }},
+ A: {label: 'array', check: function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }},
+ S: {label: 'string', check: function (thingy) { return typeof thingy === 'string' }},
+ N: {label: 'number', check: function (thingy) { return typeof thingy === 'number' }},
+ F: {label: 'function', check: function (thingy) { return typeof thingy === 'function' }},
+ O: {label: 'object', check: function (thingy) { return typeof thingy === 'object' && thingy != null && !types.A.check(thingy) && !types.E.check(thingy) }},
+ B: {label: 'boolean', check: function (thingy) { return typeof thingy === 'boolean' }},
+ E: {label: 'error', check: function (thingy) { return thingy instanceof Error }},
+ Z: {label: 'null', check: function (thingy) { return thingy == null }}
+}
+
+function addSchema (schema, arity) {
+ var group = arity[schema.length] = arity[schema.length] || []
+ if (group.indexOf(schema) === -1) group.push(schema)
+}
+
+var validate = module.exports = function (rawSchemas, args) {
+ if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length)
+ if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas')
+ if (!args) throw missingRequiredArg(1, 'args')
+ if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas)
+ if (!types.A.check(args)) throw invalidType(1, ['array'], args)
+ var schemas = rawSchemas.split('|')
+ var arity = {}
+
+ schemas.forEach(function (schema) {
+ for (var ii = 0; ii < schema.length; ++ii) {
+ var type = schema[ii]
+ if (!types[type]) throw unknownType(ii, type)
}
-};
-var BlobPropertiesInternal = {
- xmlName: "Properties",
- serializedName: "BlobPropertiesInternal",
- type: {
- name: "Composite",
- className: "BlobPropertiesInternal",
- modelProperties: {
- createdOn: {
- xmlName: "Creation-Time",
- serializedName: "Creation-Time",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- lastModified: {
- xmlName: "Last-Modified",
- required: true,
- serializedName: "Last-Modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- etag: {
- xmlName: "Etag",
- required: true,
- serializedName: "Etag",
- type: {
- name: "String"
- }
- },
- contentLength: {
- xmlName: "Content-Length",
- serializedName: "Content-Length",
- type: {
- name: "Number"
- }
- },
- contentType: {
- xmlName: "Content-Type",
- serializedName: "Content-Type",
- type: {
- name: "String"
- }
- },
- contentEncoding: {
- xmlName: "Content-Encoding",
- serializedName: "Content-Encoding",
- type: {
- name: "String"
- }
- },
- contentLanguage: {
- xmlName: "Content-Language",
- serializedName: "Content-Language",
- type: {
- name: "String"
- }
- },
- contentMD5: {
- xmlName: "Content-MD5",
- serializedName: "Content-MD5",
- type: {
- name: "ByteArray"
- }
- },
- contentDisposition: {
- xmlName: "Content-Disposition",
- serializedName: "Content-Disposition",
- type: {
- name: "String"
- }
- },
- cacheControl: {
- xmlName: "Cache-Control",
- serializedName: "Cache-Control",
- type: {
- name: "String"
- }
- },
- blobSequenceNumber: {
- xmlName: "x-ms-blob-sequence-number",
- serializedName: "x-ms-blob-sequence-number",
- type: {
- name: "Number"
- }
- },
- blobType: {
- xmlName: "BlobType",
- serializedName: "BlobType",
- type: {
- name: "Enum",
- allowedValues: [
- "BlockBlob",
- "PageBlob",
- "AppendBlob"
- ]
- }
- },
- leaseStatus: {
- xmlName: "LeaseStatus",
- serializedName: "LeaseStatus",
- type: {
- name: "Enum",
- allowedValues: [
- "locked",
- "unlocked"
- ]
- }
- },
- leaseState: {
- xmlName: "LeaseState",
- serializedName: "LeaseState",
- type: {
- name: "Enum",
- allowedValues: [
- "available",
- "leased",
- "expired",
- "breaking",
- "broken"
- ]
- }
- },
- leaseDuration: {
- xmlName: "LeaseDuration",
- serializedName: "LeaseDuration",
- type: {
- name: "Enum",
- allowedValues: [
- "infinite",
- "fixed"
- ]
- }
- },
- copyId: {
- xmlName: "CopyId",
- serializedName: "CopyId",
- type: {
- name: "String"
- }
- },
- copyStatus: {
- xmlName: "CopyStatus",
- serializedName: "CopyStatus",
- type: {
- name: "Enum",
- allowedValues: [
- "pending",
- "success",
- "aborted",
- "failed"
- ]
- }
- },
- copySource: {
- xmlName: "CopySource",
- serializedName: "CopySource",
- type: {
- name: "String"
- }
- },
- copyProgress: {
- xmlName: "CopyProgress",
- serializedName: "CopyProgress",
- type: {
- name: "String"
- }
- },
- copyCompletedOn: {
- xmlName: "CopyCompletionTime",
- serializedName: "CopyCompletionTime",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- copyStatusDescription: {
- xmlName: "CopyStatusDescription",
- serializedName: "CopyStatusDescription",
- type: {
- name: "String"
- }
- },
- serverEncrypted: {
- xmlName: "ServerEncrypted",
- serializedName: "ServerEncrypted",
- type: {
- name: "Boolean"
- }
- },
- incrementalCopy: {
- xmlName: "IncrementalCopy",
- serializedName: "IncrementalCopy",
- type: {
- name: "Boolean"
- }
- },
- destinationSnapshot: {
- xmlName: "DestinationSnapshot",
- serializedName: "DestinationSnapshot",
- type: {
- name: "String"
- }
- },
- deletedOn: {
- xmlName: "DeletedTime",
- serializedName: "DeletedTime",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- remainingRetentionDays: {
- xmlName: "RemainingRetentionDays",
- serializedName: "RemainingRetentionDays",
- type: {
- name: "Number"
- }
- },
- accessTier: {
- xmlName: "AccessTier",
- serializedName: "AccessTier",
- type: {
- name: "String"
- }
- },
- accessTierInferred: {
- xmlName: "AccessTierInferred",
- serializedName: "AccessTierInferred",
- type: {
- name: "Boolean"
- }
- },
- archiveStatus: {
- xmlName: "ArchiveStatus",
- serializedName: "ArchiveStatus",
- type: {
- name: "String"
- }
- },
- customerProvidedKeySha256: {
- xmlName: "CustomerProvidedKeySha256",
- serializedName: "CustomerProvidedKeySha256",
- type: {
- name: "String"
- }
- },
- encryptionScope: {
- xmlName: "EncryptionScope",
- serializedName: "EncryptionScope",
- type: {
- name: "String"
- }
- },
- accessTierChangedOn: {
- xmlName: "AccessTierChangeTime",
- serializedName: "AccessTierChangeTime",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- tagCount: {
- xmlName: "TagCount",
- serializedName: "TagCount",
- type: {
- name: "Number"
- }
- },
- expiresOn: {
- xmlName: "Expiry-Time",
- serializedName: "Expiry-Time",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- isSealed: {
- xmlName: "Sealed",
- serializedName: "Sealed",
- type: {
- name: "Boolean"
- }
- },
- rehydratePriority: {
- xmlName: "RehydratePriority",
- serializedName: "RehydratePriority",
- type: {
- name: "String"
- }
- }
- }
+ if (/E.*E/.test(schema)) throw moreThanOneError(schema)
+ addSchema(schema, arity)
+ if (/E/.test(schema)) {
+ addSchema(schema.replace(/E.*$/, 'E'), arity)
+ addSchema(schema.replace(/E/, 'Z'), arity)
+ if (schema.length === 1) addSchema('', arity)
}
-};
-var BlobTag = {
- xmlName: "Tag",
- serializedName: "BlobTag",
- type: {
- name: "Composite",
- className: "BlobTag",
- modelProperties: {
- key: {
- xmlName: "Key",
- required: true,
- serializedName: "Key",
- type: {
- name: "String"
- }
- },
- value: {
- xmlName: "Value",
- required: true,
- serializedName: "Value",
- type: {
- name: "String"
- }
- }
- }
+ })
+ var matching = arity[args.length]
+ if (!matching) {
+ throw wrongNumberOfArgs(Object.keys(arity), args.length)
+ }
+ for (var ii = 0; ii < args.length; ++ii) {
+ var newMatching = matching.filter(function (schema) {
+ var type = schema[ii]
+ var typeCheck = types[type].check
+ return typeCheck(args[ii])
+ })
+ if (!newMatching.length) {
+ var labels = matching.map(function (schema) {
+ return types[schema[ii]].label
+ }).filter(function (schema) { return schema != null })
+ throw invalidType(ii, labels, args[ii])
}
+ matching = newMatching
+ }
+}
+
+function missingRequiredArg (num) {
+ return newException('EMISSINGARG', 'Missing required argument #' + (num + 1))
+}
+
+function unknownType (num, type) {
+ return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1))
+}
+
+function invalidType (num, expectedTypes, value) {
+ var valueType
+ Object.keys(types).forEach(function (typeCode) {
+ if (types[typeCode].check(value)) valueType = types[typeCode].label
+ })
+ return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' +
+ englishList(expectedTypes) + ' but got ' + valueType)
+}
+
+function englishList (list) {
+ return list.join(', ').replace(/, ([^,]+)$/, ' or $1')
+}
+
+function wrongNumberOfArgs (expected, got) {
+ var english = englishList(expected)
+ var args = expected.every(function (ex) { return ex.length === 1 })
+ ? 'argument'
+ : 'arguments'
+ return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got)
+}
+
+function moreThanOneError (schema) {
+ return newException('ETOOMANYERRORTYPES',
+ 'Only one error type per argument signature is allowed, more than one found in "' + schema + '"')
+}
+
+function newException (code, msg) {
+ var e = new Error(msg)
+ e.code = code
+ if (Error.captureStackTrace) Error.captureStackTrace(e, validate)
+ return e
+}
+
+
+/***/ }),
+/* 286 */
+/***/ (function(__unusedmodule, exports) {
+
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// NOTE: These type checking functions intentionally don't use `instanceof`
+// because it is fragile and can be easily faked with `Object.create()`.
+
+function isArray(arg) {
+ if (Array.isArray) {
+ return Array.isArray(arg);
+ }
+ return objectToString(arg) === '[object Array]';
+}
+exports.isArray = isArray;
+
+function isBoolean(arg) {
+ return typeof arg === 'boolean';
+}
+exports.isBoolean = isBoolean;
+
+function isNull(arg) {
+ return arg === null;
+}
+exports.isNull = isNull;
+
+function isNullOrUndefined(arg) {
+ return arg == null;
+}
+exports.isNullOrUndefined = isNullOrUndefined;
+
+function isNumber(arg) {
+ return typeof arg === 'number';
+}
+exports.isNumber = isNumber;
+
+function isString(arg) {
+ return typeof arg === 'string';
+}
+exports.isString = isString;
+
+function isSymbol(arg) {
+ return typeof arg === 'symbol';
+}
+exports.isSymbol = isSymbol;
+
+function isUndefined(arg) {
+ return arg === void 0;
+}
+exports.isUndefined = isUndefined;
+
+function isRegExp(re) {
+ return objectToString(re) === '[object RegExp]';
+}
+exports.isRegExp = isRegExp;
+
+function isObject(arg) {
+ return typeof arg === 'object' && arg !== null;
+}
+exports.isObject = isObject;
+
+function isDate(d) {
+ return objectToString(d) === '[object Date]';
+}
+exports.isDate = isDate;
+
+function isError(e) {
+ return (objectToString(e) === '[object Error]' || e instanceof Error);
+}
+exports.isError = isError;
+
+function isFunction(arg) {
+ return typeof arg === 'function';
+}
+exports.isFunction = isFunction;
+
+function isPrimitive(arg) {
+ return arg === null ||
+ typeof arg === 'boolean' ||
+ typeof arg === 'number' ||
+ typeof arg === 'string' ||
+ typeof arg === 'symbol' || // ES6 symbol
+ typeof arg === 'undefined';
+}
+exports.isPrimitive = isPrimitive;
+
+exports.isBuffer = Buffer.isBuffer;
+
+function objectToString(o) {
+ return Object.prototype.toString.call(o);
+}
+
+
+/***/ }),
+/* 287 */,
+/* 288 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+// Update this array if you add/rename/remove files in this directory.
+// We support Browserify by skipping automatic module discovery and requiring modules directly.
+var modules = [
+ __webpack_require__(162),
+ __webpack_require__(640),
+ __webpack_require__(797),
+ __webpack_require__(645),
+ __webpack_require__(877),
+ __webpack_require__(762),
+ __webpack_require__(28),
+ __webpack_require__(189),
+ __webpack_require__(92),
+];
+
+// Put all encoding/alias/codec definitions to single object and export it.
+for (var i = 0; i < modules.length; i++) {
+ var module = modules[i];
+ for (var enc in module)
+ if (Object.prototype.hasOwnProperty.call(module, enc))
+ exports[enc] = module[enc];
+}
+
+
+/***/ }),
+/* 289 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+// tar -x
+const hlo = __webpack_require__(891)
+const Unpack = __webpack_require__(63)
+const fs = __webpack_require__(747)
+const fsm = __webpack_require__(827)
+const path = __webpack_require__(622)
+
+const x = module.exports = (opt_, files, cb) => {
+ if (typeof opt_ === 'function')
+ cb = opt_, files = null, opt_ = {}
+ else if (Array.isArray(opt_))
+ files = opt_, opt_ = {}
+
+ if (typeof files === 'function')
+ cb = files, files = null
+
+ if (!files)
+ files = []
+ else
+ files = Array.from(files)
+
+ const opt = hlo(opt_)
+
+ if (opt.sync && typeof cb === 'function')
+ throw new TypeError('callback not supported for sync tar functions')
+
+ if (!opt.file && typeof cb === 'function')
+ throw new TypeError('callback only supported with file option')
+
+ if (files.length)
+ filesFilter(opt, files)
+
+ return opt.file && opt.sync ? extractFileSync(opt)
+ : opt.file ? extractFile(opt, cb)
+ : opt.sync ? extractSync(opt)
+ : extract(opt)
+}
+
+// construct a filter that limits the file entries listed
+// include child entries if a dir is included
+const filesFilter = (opt, files) => {
+ const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
+ const filter = opt.filter
+
+ const mapHas = (file, r) => {
+ const root = r || path.parse(file).root || '.'
+ const ret = file === root ? false
+ : map.has(file) ? map.get(file)
+ : mapHas(path.dirname(file), root)
+
+ map.set(file, ret)
+ return ret
+ }
+
+ opt.filter = filter
+ ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
+ : file => mapHas(file.replace(/\/+$/, ''))
+}
+
+const extractFileSync = opt => {
+ const u = new Unpack.Sync(opt)
+
+ const file = opt.file
+ let threw = true
+ let fd
+ const stat = fs.statSync(file)
+ // This trades a zero-byte read() syscall for a stat
+ // However, it will usually result in less memory allocation
+ const readSize = opt.maxReadSize || 16*1024*1024
+ const stream = new fsm.ReadStreamSync(file, {
+ readSize: readSize,
+ size: stat.size
+ })
+ stream.pipe(u)
+}
+
+const extractFile = (opt, cb) => {
+ const u = new Unpack(opt)
+ const readSize = opt.maxReadSize || 16*1024*1024
+
+ const file = opt.file
+ const p = new Promise((resolve, reject) => {
+ u.on('error', reject)
+ u.on('close', resolve)
+
+ // This trades a zero-byte read() syscall for a stat
+ // However, it will usually result in less memory allocation
+ fs.stat(file, (er, stat) => {
+ if (er)
+ reject(er)
+ else {
+ const stream = new fsm.ReadStream(file, {
+ readSize: readSize,
+ size: stat.size
+ })
+ stream.on('error', reject)
+ stream.pipe(u)
+ }
+ })
+ })
+ return cb ? p.then(cb, cb) : p
+}
+
+const extractSync = opt => {
+ return new Unpack.Sync(opt)
+}
+
+const extract = opt => {
+ return new Unpack(opt)
+}
+
+
+/***/ }),
+/* 290 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+module.exports = __webpack_require__(305)
+
+
+/***/ }),
+/* 291 */,
+/* 292 */,
+/* 293 */
+/***/ (function(module) {
+
+module.exports = require("buffer");
+
+/***/ }),
+/* 294 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const fs = __webpack_require__(747);
+
+module.exports = fp => new Promise(resolve => {
+ fs.access(fp, err => {
+ resolve(!err);
+ });
+});
+
+module.exports.sync = fp => {
+ try {
+ fs.accessSync(fp);
+ return true;
+ } catch (err) {
+ return false;
+ }
};
-var BlobTags = {
- xmlName: "Tags",
- serializedName: "BlobTags",
- type: {
- name: "Composite",
- className: "BlobTags",
- modelProperties: {
- blobTagSet: {
- xmlIsWrapped: true,
- xmlName: "TagSet",
- xmlElementName: "Tag",
- required: true,
- serializedName: "BlobTagSet",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "BlobTag"
- }
- }
+
+
+/***/ }),
+/* 295 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const url = __webpack_require__(835)
+
+module.exports = setWarning
+
+function setWarning (reqOrRes, code, message, replace) {
+ // Warning = "Warning" ":" 1#warning-value
+ // warning-value = warn-code SP warn-agent SP warn-text [SP warn-date]
+ // warn-code = 3DIGIT
+ // warn-agent = ( host [ ":" port ] ) | pseudonym
+ // ; the name or pseudonym of the server adding
+ // ; the Warning header, for use in debugging
+ // warn-text = quoted-string
+ // warn-date = <"> HTTP-date <">
+ // (https://tools.ietf.org/html/rfc2616#section-14.46)
+ const host = url.parse(reqOrRes.url).host
+ const jsonMessage = JSON.stringify(message)
+ const jsonDate = JSON.stringify(new Date().toUTCString())
+ const header = replace ? 'set' : 'append'
+
+ reqOrRes.headers[header](
+ 'Warning',
+ `${code} ${host} ${jsonMessage} ${jsonDate}`
+ )
+}
+
+
+/***/ }),
+/* 296 */,
+/* 297 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+ if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+ var m = o[Symbol.asyncIterator], i;
+ return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+ function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+ function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
+var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }
+var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) {
+ if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+ var g = generator.apply(thisArg, _arguments || []), i, q = [];
+ return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
+ function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
+ function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
+ function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
+ function fulfill(value) { resume("next", value); }
+ function reject(value) { resume("throw", value); }
+ function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const core = __webpack_require__(470);
+const fs = __webpack_require__(747);
+const globOptionsHelper = __webpack_require__(601);
+const path = __webpack_require__(622);
+const patternHelper = __webpack_require__(597);
+const internal_match_kind_1 = __webpack_require__(327);
+const internal_pattern_1 = __webpack_require__(923);
+const internal_search_state_1 = __webpack_require__(728);
+const IS_WINDOWS = process.platform === 'win32';
+class DefaultGlobber {
+ constructor(options) {
+ this.patterns = [];
+ this.searchPaths = [];
+ this.options = globOptionsHelper.getOptions(options);
+ }
+ getSearchPaths() {
+ // Return a copy
+ return this.searchPaths.slice();
+ }
+ glob() {
+ var e_1, _a;
+ return __awaiter(this, void 0, void 0, function* () {
+ const result = [];
+ try {
+ for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
+ const itemPath = _c.value;
+ result.push(itemPath);
}
}
- }
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
+ finally {
+ try {
+ if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
+ }
+ finally { if (e_1) throw e_1.error; }
+ }
+ return result;
+ });
}
-};
-var BlobItemInternal = {
- xmlName: "Blob",
- serializedName: "BlobItemInternal",
- type: {
- name: "Composite",
- className: "BlobItemInternal",
- modelProperties: {
- name: {
- xmlName: "Name",
- required: true,
- serializedName: "Name",
- type: {
- name: "String"
- }
- },
- deleted: {
- xmlName: "Deleted",
- required: true,
- serializedName: "Deleted",
- type: {
- name: "Boolean"
- }
- },
- snapshot: {
- xmlName: "Snapshot",
- required: true,
- serializedName: "Snapshot",
- type: {
- name: "String"
- }
- },
- versionId: {
- xmlName: "VersionId",
- serializedName: "VersionId",
- type: {
- name: "String"
- }
- },
- isCurrentVersion: {
- xmlName: "IsCurrentVersion",
- serializedName: "IsCurrentVersion",
- type: {
- name: "Boolean"
- }
- },
- properties: {
- xmlName: "Properties",
- required: true,
- serializedName: "Properties",
- type: {
- name: "Composite",
- className: "BlobPropertiesInternal"
- }
- },
- metadata: {
- xmlName: "Metadata",
- serializedName: "Metadata",
- type: {
- name: "Dictionary",
- value: {
- type: {
- name: "String"
- }
- }
- }
- },
- blobTags: {
- xmlName: "Tags",
- serializedName: "BlobTags",
- type: {
- name: "Composite",
- className: "BlobTags"
- }
- },
- objectReplicationMetadata: {
- xmlName: "OrMetadata",
- serializedName: "ObjectReplicationMetadata",
- type: {
- name: "Dictionary",
- value: {
- type: {
- name: "String"
- }
- }
+ globGenerator() {
+ return __asyncGenerator(this, arguments, function* globGenerator_1() {
+ // Fill in defaults options
+ const options = globOptionsHelper.getOptions(this.options);
+ // Implicit descendants?
+ const patterns = [];
+ for (const pattern of this.patterns) {
+ patterns.push(pattern);
+ if (options.implicitDescendants &&
+ (pattern.trailingSeparator ||
+ pattern.segments[pattern.segments.length - 1] !== '**')) {
+ patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**')));
}
}
- }
- }
-};
-var BlobFlatListSegment = {
- xmlName: "Blobs",
- serializedName: "BlobFlatListSegment",
- type: {
- name: "Composite",
- className: "BlobFlatListSegment",
- modelProperties: {
- blobItems: {
- xmlName: "BlobItems",
- xmlElementName: "Blob",
- required: true,
- serializedName: "BlobItems",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "BlobItemInternal"
- }
+ // Push the search paths
+ const stack = [];
+ for (const searchPath of patternHelper.getSearchPaths(patterns)) {
+ core.debug(`Search path '${searchPath}'`);
+ // Exists?
+ try {
+ // Intentionally using lstat. Detection for broken symlink
+ // will be performed later (if following symlinks).
+ yield __await(fs.promises.lstat(searchPath));
+ }
+ catch (err) {
+ if (err.code === 'ENOENT') {
+ continue;
}
+ throw err;
}
+ stack.unshift(new internal_search_state_1.SearchState(searchPath, 1));
}
- }
- }
-};
-var ListBlobsFlatSegmentResponse = {
- xmlName: "EnumerationResults",
- serializedName: "ListBlobsFlatSegmentResponse",
- type: {
- name: "Composite",
- className: "ListBlobsFlatSegmentResponse",
- modelProperties: {
- serviceEndpoint: {
- xmlIsAttribute: true,
- xmlName: "ServiceEndpoint",
- required: true,
- serializedName: "ServiceEndpoint",
- type: {
- name: "String"
- }
- },
- containerName: {
- xmlIsAttribute: true,
- xmlName: "ContainerName",
- required: true,
- serializedName: "ContainerName",
- type: {
- name: "String"
- }
- },
- prefix: {
- xmlName: "Prefix",
- serializedName: "Prefix",
- type: {
- name: "String"
- }
- },
- marker: {
- xmlName: "Marker",
- serializedName: "Marker",
- type: {
- name: "String"
- }
- },
- maxPageSize: {
- xmlName: "MaxResults",
- serializedName: "MaxResults",
- type: {
- name: "Number"
- }
- },
- segment: {
- xmlName: "Blobs",
- required: true,
- serializedName: "Segment",
- type: {
- name: "Composite",
- className: "BlobFlatListSegment"
- }
- },
- continuationToken: {
- xmlName: "NextMarker",
- serializedName: "NextMarker",
- type: {
- name: "String"
+ // Search
+ const traversalChain = []; // used to detect cycles
+ while (stack.length) {
+ // Pop
+ const item = stack.pop();
+ // Match?
+ const match = patternHelper.match(patterns, item.path);
+ const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path);
+ if (!match && !partialMatch) {
+ continue;
}
- }
- }
- }
-};
-var BlobPrefix = {
- serializedName: "BlobPrefix",
- type: {
- name: "Composite",
- className: "BlobPrefix",
- modelProperties: {
- name: {
- xmlName: "Name",
- required: true,
- serializedName: "Name",
- type: {
- name: "String"
+ // Stat
+ const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain)
+ // Broken symlink, or symlink cycle detected, or no longer exists
+ );
+ // Broken symlink, or symlink cycle detected, or no longer exists
+ if (!stats) {
+ continue;
}
- }
- }
- }
-};
-var BlobHierarchyListSegment = {
- xmlName: "Blobs",
- serializedName: "BlobHierarchyListSegment",
- type: {
- name: "Composite",
- className: "BlobHierarchyListSegment",
- modelProperties: {
- blobPrefixes: {
- xmlName: "BlobPrefixes",
- xmlElementName: "BlobPrefix",
- serializedName: "BlobPrefixes",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "BlobPrefix"
- }
+ // Directory
+ if (stats.isDirectory()) {
+ // Matched
+ if (match & internal_match_kind_1.MatchKind.Directory) {
+ yield yield __await(item.path);
}
- }
- },
- blobItems: {
- xmlName: "BlobItems",
- xmlElementName: "Blob",
- required: true,
- serializedName: "BlobItems",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "BlobItemInternal"
- }
+ // Descend?
+ else if (!partialMatch) {
+ continue;
}
+ // Push the child items in reverse
+ const childLevel = item.level + 1;
+ const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel));
+ stack.push(...childItems.reverse());
}
- }
- }
- }
-};
-var ListBlobsHierarchySegmentResponse = {
- xmlName: "EnumerationResults",
- serializedName: "ListBlobsHierarchySegmentResponse",
- type: {
- name: "Composite",
- className: "ListBlobsHierarchySegmentResponse",
- modelProperties: {
- serviceEndpoint: {
- xmlIsAttribute: true,
- xmlName: "ServiceEndpoint",
- required: true,
- serializedName: "ServiceEndpoint",
- type: {
- name: "String"
- }
- },
- containerName: {
- xmlIsAttribute: true,
- xmlName: "ContainerName",
- required: true,
- serializedName: "ContainerName",
- type: {
- name: "String"
- }
- },
- prefix: {
- xmlName: "Prefix",
- serializedName: "Prefix",
- type: {
- name: "String"
- }
- },
- marker: {
- xmlName: "Marker",
- serializedName: "Marker",
- type: {
- name: "String"
- }
- },
- maxPageSize: {
- xmlName: "MaxResults",
- serializedName: "MaxResults",
- type: {
- name: "Number"
- }
- },
- delimiter: {
- xmlName: "Delimiter",
- serializedName: "Delimiter",
- type: {
- name: "String"
- }
- },
- segment: {
- xmlName: "Blobs",
- required: true,
- serializedName: "Segment",
- type: {
- name: "Composite",
- className: "BlobHierarchyListSegment"
- }
- },
- continuationToken: {
- xmlName: "NextMarker",
- serializedName: "NextMarker",
- type: {
- name: "String"
+ // File
+ else if (match & internal_match_kind_1.MatchKind.File) {
+ yield yield __await(item.path);
}
}
- }
+ });
}
-};
-var Block = {
- serializedName: "Block",
- type: {
- name: "Composite",
- className: "Block",
- modelProperties: {
- name: {
- xmlName: "Name",
- required: true,
- serializedName: "Name",
- type: {
- name: "String"
+ /**
+ * Constructs a DefaultGlobber
+ */
+ static create(patterns, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const result = new DefaultGlobber(options);
+ if (IS_WINDOWS) {
+ patterns = patterns.replace(/\r\n/g, '\n');
+ patterns = patterns.replace(/\r/g, '\n');
+ }
+ const lines = patterns.split('\n').map(x => x.trim());
+ for (const line of lines) {
+ // Empty or comment
+ if (!line || line.startsWith('#')) {
+ continue;
}
- },
- size: {
- xmlName: "Size",
- required: true,
- serializedName: "Size",
- type: {
- name: "Number"
+ // Pattern
+ else {
+ result.patterns.push(new internal_pattern_1.Pattern(line));
}
}
- }
+ result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns));
+ return result;
+ });
}
-};
-var BlockList = {
- serializedName: "BlockList",
- type: {
- name: "Composite",
- className: "BlockList",
- modelProperties: {
- committedBlocks: {
- xmlIsWrapped: true,
- xmlName: "CommittedBlocks",
- xmlElementName: "Block",
- serializedName: "CommittedBlocks",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "Block"
- }
- }
+ static stat(item, options, traversalChain) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // Note:
+ // `stat` returns info about the target of a symlink (or symlink chain)
+ // `lstat` returns info about a symlink itself
+ let stats;
+ if (options.followSymbolicLinks) {
+ try {
+ // Use `stat` (following symlinks)
+ stats = yield fs.promises.stat(item.path);
}
- },
- uncommittedBlocks: {
- xmlIsWrapped: true,
- xmlName: "UncommittedBlocks",
- xmlElementName: "Block",
- serializedName: "UncommittedBlocks",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "Block"
+ catch (err) {
+ if (err.code === 'ENOENT') {
+ if (options.omitBrokenSymbolicLinks) {
+ core.debug(`Broken symlink '${item.path}'`);
+ return undefined;
}
+ throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`);
}
+ throw err;
}
}
- }
- }
-};
-var BlockLookupList = {
- xmlName: "BlockList",
- serializedName: "BlockLookupList",
- type: {
- name: "Composite",
- className: "BlockLookupList",
- modelProperties: {
- committed: {
- xmlName: "Committed",
- xmlElementName: "Committed",
- serializedName: "Committed",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "String"
- }
- }
- }
- },
- uncommitted: {
- xmlName: "Uncommitted",
- xmlElementName: "Uncommitted",
- serializedName: "Uncommitted",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "String"
- }
- }
+ else {
+ // Use `lstat` (not following symlinks)
+ stats = yield fs.promises.lstat(item.path);
+ }
+ // Note, isDirectory() returns false for the lstat of a symlink
+ if (stats.isDirectory() && options.followSymbolicLinks) {
+ // Get the realpath
+ const realPath = yield fs.promises.realpath(item.path);
+ // Fixup the traversal chain to match the item level
+ while (traversalChain.length >= item.level) {
+ traversalChain.pop();
}
- },
- latest: {
- xmlName: "Latest",
- xmlElementName: "Latest",
- serializedName: "Latest",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "String"
- }
- }
+ // Test for a cycle
+ if (traversalChain.some((x) => x === realPath)) {
+ core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`);
+ return undefined;
}
+ // Update the traversal chain
+ traversalChain.push(realPath);
}
- }
+ return stats;
+ });
}
-};
-var ContainerProperties = {
- serializedName: "ContainerProperties",
- type: {
- name: "Composite",
- className: "ContainerProperties",
- modelProperties: {
- lastModified: {
- xmlName: "Last-Modified",
- required: true,
- serializedName: "Last-Modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- etag: {
- xmlName: "Etag",
- required: true,
- serializedName: "Etag",
- type: {
- name: "String"
- }
- },
- leaseStatus: {
- xmlName: "LeaseStatus",
- serializedName: "LeaseStatus",
- type: {
- name: "Enum",
- allowedValues: [
- "locked",
- "unlocked"
- ]
- }
- },
- leaseState: {
- xmlName: "LeaseState",
- serializedName: "LeaseState",
- type: {
- name: "Enum",
- allowedValues: [
- "available",
- "leased",
- "expired",
- "breaking",
- "broken"
- ]
- }
- },
- leaseDuration: {
- xmlName: "LeaseDuration",
- serializedName: "LeaseDuration",
- type: {
- name: "Enum",
- allowedValues: [
- "infinite",
- "fixed"
- ]
- }
- },
- publicAccess: {
- xmlName: "PublicAccess",
- serializedName: "PublicAccess",
- type: {
- name: "String"
- }
- },
- hasImmutabilityPolicy: {
- xmlName: "HasImmutabilityPolicy",
- serializedName: "HasImmutabilityPolicy",
- type: {
- name: "Boolean"
- }
- },
- hasLegalHold: {
- xmlName: "HasLegalHold",
- serializedName: "HasLegalHold",
- type: {
- name: "Boolean"
- }
- },
- defaultEncryptionScope: {
- xmlName: "DefaultEncryptionScope",
- serializedName: "DefaultEncryptionScope",
- type: {
- name: "String"
- }
- },
- preventEncryptionScopeOverride: {
- xmlName: "DenyEncryptionScopeOverride",
- serializedName: "DenyEncryptionScopeOverride",
- type: {
- name: "Boolean"
- }
- },
- deletedOn: {
- xmlName: "DeletedTime",
- serializedName: "DeletedTime",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- remainingRetentionDays: {
- xmlName: "RemainingRetentionDays",
- serializedName: "RemainingRetentionDays",
- type: {
- name: "Number"
- }
- }
- }
+}
+exports.DefaultGlobber = DefaultGlobber;
+//# sourceMappingURL=internal-globber.js.map
+
+/***/ }),
+/* 298 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+var _v = _interopRequireDefault(__webpack_require__(241));
+
+var _md = _interopRequireDefault(__webpack_require__(245));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+const v3 = (0, _v.default)('v3', 0x30, _md.default);
+var _default = v3;
+exports.default = _default;
+
+/***/ }),
+/* 299 */
+/***/ (function(module) {
+
+module.exports = function (blocking) {
+ [process.stdout, process.stderr].forEach(function (stream) {
+ if (stream._handle && stream.isTTY && typeof stream._handle.setBlocking === 'function') {
+ stream._handle.setBlocking(blocking)
}
-};
-var ContainerItem = {
- xmlName: "Container",
- serializedName: "ContainerItem",
- type: {
- name: "Composite",
- className: "ContainerItem",
- modelProperties: {
- name: {
- xmlName: "Name",
- required: true,
- serializedName: "Name",
- type: {
- name: "String"
- }
- },
- deleted: {
- xmlName: "Deleted",
- serializedName: "Deleted",
- type: {
- name: "Boolean"
- }
- },
- version: {
- xmlName: "Version",
- serializedName: "Version",
- type: {
- name: "String"
- }
- },
- properties: {
- xmlName: "Properties",
- required: true,
- serializedName: "Properties",
- type: {
- name: "Composite",
- className: "ContainerProperties"
- }
- },
- metadata: {
- xmlName: "Metadata",
- serializedName: "Metadata",
- type: {
- name: "Dictionary",
- value: {
- type: {
- name: "String"
- }
- }
- }
+ })
+}
+
+
+/***/ }),
+/* 300 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+var consoleControl = __webpack_require__(920)
+var ThemeSet = __webpack_require__(570)
+
+var themes = module.exports = new ThemeSet()
+
+themes.addTheme('ASCII', {
+ preProgressbar: '[',
+ postProgressbar: ']',
+ progressbarTheme: {
+ complete: '#',
+ remaining: '.'
+ },
+ activityIndicatorTheme: '-\\|/',
+ preSubsection: '>'
+})
+
+themes.addTheme('colorASCII', themes.getTheme('ASCII'), {
+ progressbarTheme: {
+ preComplete: consoleControl.color('inverse'),
+ complete: ' ',
+ postComplete: consoleControl.color('stopInverse'),
+ preRemaining: consoleControl.color('brightBlack'),
+ remaining: '.',
+ postRemaining: consoleControl.color('reset')
+ }
+})
+
+themes.addTheme('brailleSpinner', {
+ preProgressbar: '⸨',
+ postProgressbar: '⸩',
+ progressbarTheme: {
+ complete: '░',
+ remaining: '⠂'
+ },
+ activityIndicatorTheme: '⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏',
+ preSubsection: '>'
+})
+
+themes.addTheme('colorBrailleSpinner', themes.getTheme('brailleSpinner'), {
+ progressbarTheme: {
+ preComplete: consoleControl.color('inverse'),
+ complete: ' ',
+ postComplete: consoleControl.color('stopInverse'),
+ preRemaining: consoleControl.color('brightBlack'),
+ remaining: '░',
+ postRemaining: consoleControl.color('reset')
+ }
+})
+
+themes.setDefault({}, 'ASCII')
+themes.setDefault({hasColor: true}, 'colorASCII')
+themes.setDefault({platform: 'darwin', hasUnicode: true}, 'brailleSpinner')
+themes.setDefault({platform: 'darwin', hasUnicode: true, hasColor: true}, 'colorBrailleSpinner')
+
+
+/***/ }),
+/* 301 */,
+/* 302 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = realpath
+realpath.realpath = realpath
+realpath.sync = realpathSync
+realpath.realpathSync = realpathSync
+realpath.monkeypatch = monkeypatch
+realpath.unmonkeypatch = unmonkeypatch
+
+var fs = __webpack_require__(747)
+var origRealpath = fs.realpath
+var origRealpathSync = fs.realpathSync
+
+var version = process.version
+var ok = /^v[0-5]\./.test(version)
+var old = __webpack_require__(117)
+
+function newError (er) {
+ return er && er.syscall === 'realpath' && (
+ er.code === 'ELOOP' ||
+ er.code === 'ENOMEM' ||
+ er.code === 'ENAMETOOLONG'
+ )
+}
+
+function realpath (p, cache, cb) {
+ if (ok) {
+ return origRealpath(p, cache, cb)
+ }
+
+ if (typeof cache === 'function') {
+ cb = cache
+ cache = null
+ }
+ origRealpath(p, cache, function (er, result) {
+ if (newError(er)) {
+ old.realpath(p, cache, cb)
+ } else {
+ cb(er, result)
+ }
+ })
+}
+
+function realpathSync (p, cache) {
+ if (ok) {
+ return origRealpathSync(p, cache)
+ }
+
+ try {
+ return origRealpathSync(p, cache)
+ } catch (er) {
+ if (newError(er)) {
+ return old.realpathSync(p, cache)
+ } else {
+ throw er
+ }
+ }
+}
+
+function monkeypatch () {
+ fs.realpath = realpath
+ fs.realpathSync = realpathSync
+}
+
+function unmonkeypatch () {
+ fs.realpath = origRealpath
+ fs.realpathSync = origRealpathSync
+}
+
+
+/***/ }),
+/* 303 */
+/***/ (function(module) {
+
+module.exports = require("async_hooks");
+
+/***/ }),
+/* 304 */
+/***/ (function(module) {
+
+module.exports = require("string_decoder");
+
+/***/ }),
+/* 305 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const BB = __webpack_require__(900)
+
+const contentPath = __webpack_require__(969)
+const figgyPudding = __webpack_require__(965)
+const finished = BB.promisify(__webpack_require__(371).finished)
+const fixOwner = __webpack_require__(133)
+const fs = __webpack_require__(598)
+const glob = BB.promisify(__webpack_require__(402))
+const index = __webpack_require__(407)
+const path = __webpack_require__(622)
+const rimraf = BB.promisify(__webpack_require__(342))
+const ssri = __webpack_require__(951)
+
+BB.promisifyAll(fs)
+
+const VerifyOpts = figgyPudding({
+ concurrency: {
+ default: 20
+ },
+ filter: {},
+ log: {
+ default: { silly () {} }
+ }
+})
+
+module.exports = verify
+function verify (cache, opts) {
+ opts = VerifyOpts(opts)
+ opts.log.silly('verify', 'verifying cache at', cache)
+ return BB.reduce([
+ markStartTime,
+ fixPerms,
+ garbageCollect,
+ rebuildIndex,
+ cleanTmp,
+ writeVerifile,
+ markEndTime
+ ], (stats, step, i) => {
+ const label = step.name || `step #${i}`
+ const start = new Date()
+ return BB.resolve(step(cache, opts)).then(s => {
+ s && Object.keys(s).forEach(k => {
+ stats[k] = s[k]
+ })
+ const end = new Date()
+ if (!stats.runTime) { stats.runTime = {} }
+ stats.runTime[label] = end - start
+ return stats
+ })
+ }, {}).tap(stats => {
+ stats.runTime.total = stats.endTime - stats.startTime
+ opts.log.silly('verify', 'verification finished for', cache, 'in', `${stats.runTime.total}ms`)
+ })
+}
+
+function markStartTime (cache, opts) {
+ return { startTime: new Date() }
+}
+
+function markEndTime (cache, opts) {
+ return { endTime: new Date() }
+}
+
+function fixPerms (cache, opts) {
+ opts.log.silly('verify', 'fixing cache permissions')
+ return fixOwner.mkdirfix(cache, cache).then(() => {
+ // TODO - fix file permissions too
+ return fixOwner.chownr(cache, cache)
+ }).then(() => null)
+}
+
+// Implements a naive mark-and-sweep tracing garbage collector.
+//
+// The algorithm is basically as follows:
+// 1. Read (and filter) all index entries ("pointers")
+// 2. Mark each integrity value as "live"
+// 3. Read entire filesystem tree in `content-vX/` dir
+// 4. If content is live, verify its checksum and delete it if it fails
+// 5. If content is not marked as live, rimraf it.
+//
+function garbageCollect (cache, opts) {
+ opts.log.silly('verify', 'garbage collecting content')
+ const indexStream = index.lsStream(cache)
+ const liveContent = new Set()
+ indexStream.on('data', entry => {
+ if (opts.filter && !opts.filter(entry)) { return }
+ liveContent.add(entry.integrity.toString())
+ })
+ return finished(indexStream).then(() => {
+ const contentDir = contentPath._contentDir(cache)
+ return glob(path.join(contentDir, '**'), {
+ follow: false,
+ nodir: true,
+ nosort: true
+ }).then(files => {
+ return BB.resolve({
+ verifiedContent: 0,
+ reclaimedCount: 0,
+ reclaimedSize: 0,
+ badContentCount: 0,
+ keptSize: 0
+ }).tap((stats) => BB.map(files, (f) => {
+ const split = f.split(/[/\\]/)
+ const digest = split.slice(split.length - 3).join('')
+ const algo = split[split.length - 4]
+ const integrity = ssri.fromHex(digest, algo)
+ if (liveContent.has(integrity.toString())) {
+ return verifyContent(f, integrity).then(info => {
+ if (!info.valid) {
+ stats.reclaimedCount++
+ stats.badContentCount++
+ stats.reclaimedSize += info.size
+ } else {
+ stats.verifiedContent++
+ stats.keptSize += info.size
}
+ return stats
+ })
+ } else {
+ // No entries refer to this content. We can delete.
+ stats.reclaimedCount++
+ return fs.statAsync(f).then(s => {
+ return rimraf(f).then(() => {
+ stats.reclaimedSize += s.size
+ return stats
+ })
+ })
}
+ }, { concurrency: opts.concurrency }))
+ })
+ })
+}
+
+function verifyContent (filepath, sri) {
+ return fs.statAsync(filepath).then(stat => {
+ const contentInfo = {
+ size: stat.size,
+ valid: true
}
-};
-var DelimitedTextConfiguration = {
- serializedName: "DelimitedTextConfiguration",
- type: {
- name: "Composite",
- className: "DelimitedTextConfiguration",
- modelProperties: {
- columnSeparator: {
- xmlName: "ColumnSeparator",
- required: true,
- serializedName: "ColumnSeparator",
- type: {
- name: "String"
- }
- },
- fieldQuote: {
- xmlName: "FieldQuote",
- required: true,
- serializedName: "FieldQuote",
- type: {
- name: "String"
- }
- },
- recordSeparator: {
- xmlName: "RecordSeparator",
- required: true,
- serializedName: "RecordSeparator",
- type: {
- name: "String"
- }
- },
- escapeChar: {
- xmlName: "EscapeChar",
- required: true,
- serializedName: "EscapeChar",
- type: {
- name: "String"
- }
- },
- headersPresent: {
- xmlName: "HasHeaders",
- required: true,
- serializedName: "HeadersPresent",
- type: {
- name: "Boolean"
- }
- }
- }
+ return ssri.checkStream(
+ fs.createReadStream(filepath),
+ sri
+ ).catch(err => {
+ if (err.code !== 'EINTEGRITY') { throw err }
+ return rimraf(filepath).then(() => {
+ contentInfo.valid = false
+ })
+ }).then(() => contentInfo)
+ }).catch({ code: 'ENOENT' }, () => ({ size: 0, valid: false }))
+}
+
+function rebuildIndex (cache, opts) {
+ opts.log.silly('verify', 'rebuilding index')
+ return index.ls(cache).then(entries => {
+ const stats = {
+ missingContent: 0,
+ rejectedEntries: 0,
+ totalEntries: 0
}
-};
-var JsonTextConfiguration = {
- serializedName: "JsonTextConfiguration",
- type: {
- name: "Composite",
- className: "JsonTextConfiguration",
- modelProperties: {
- recordSeparator: {
- xmlName: "RecordSeparator",
- required: true,
- serializedName: "RecordSeparator",
- type: {
- name: "String"
- }
- }
+ const buckets = {}
+ for (let k in entries) {
+ if (entries.hasOwnProperty(k)) {
+ const hashed = index._hashKey(k)
+ const entry = entries[k]
+ const excluded = opts.filter && !opts.filter(entry)
+ excluded && stats.rejectedEntries++
+ if (buckets[hashed] && !excluded) {
+ buckets[hashed].push(entry)
+ } else if (buckets[hashed] && excluded) {
+ // skip
+ } else if (excluded) {
+ buckets[hashed] = []
+ buckets[hashed]._path = index._bucketPath(cache, k)
+ } else {
+ buckets[hashed] = [entry]
+ buckets[hashed]._path = index._bucketPath(cache, k)
}
+ }
}
-};
-var ListContainersSegmentResponse = {
- xmlName: "EnumerationResults",
- serializedName: "ListContainersSegmentResponse",
- type: {
- name: "Composite",
- className: "ListContainersSegmentResponse",
- modelProperties: {
- serviceEndpoint: {
- xmlIsAttribute: true,
- xmlName: "ServiceEndpoint",
- required: true,
- serializedName: "ServiceEndpoint",
- type: {
- name: "String"
- }
- },
- prefix: {
- xmlName: "Prefix",
- serializedName: "Prefix",
- type: {
- name: "String"
- }
- },
- marker: {
- xmlName: "Marker",
- serializedName: "Marker",
- type: {
- name: "String"
- }
- },
- maxPageSize: {
- xmlName: "MaxResults",
- serializedName: "MaxResults",
- type: {
- name: "Number"
- }
- },
- containerItems: {
- xmlIsWrapped: true,
- xmlName: "Containers",
- xmlElementName: "Container",
- required: true,
- serializedName: "ContainerItems",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "ContainerItem"
- }
- }
- }
- },
- continuationToken: {
- xmlName: "NextMarker",
- serializedName: "NextMarker",
- type: {
- name: "String"
- }
- }
- }
+ return BB.map(Object.keys(buckets), key => {
+ return rebuildBucket(cache, buckets[key], stats, opts)
+ }, { concurrency: opts.concurrency }).then(() => stats)
+ })
+}
+
+function rebuildBucket (cache, bucket, stats, opts) {
+ return fs.truncateAsync(bucket._path).then(() => {
+ // This needs to be serialized because cacache explicitly
+ // lets very racy bucket conflicts clobber each other.
+ return BB.mapSeries(bucket, entry => {
+ const content = contentPath(cache, entry.integrity)
+ return fs.statAsync(content).then(() => {
+ return index.insert(cache, entry.key, entry.integrity, {
+ metadata: entry.metadata,
+ size: entry.size
+ }).then(() => { stats.totalEntries++ })
+ }).catch({ code: 'ENOENT' }, () => {
+ stats.rejectedEntries++
+ stats.missingContent++
+ })
+ })
+ })
+}
+
+function cleanTmp (cache, opts) {
+ opts.log.silly('verify', 'cleaning tmp directory')
+ return rimraf(path.join(cache, 'tmp'))
+}
+
+function writeVerifile (cache, opts) {
+ const verifile = path.join(cache, '_lastverified')
+ opts.log.silly('verify', 'writing verifile to ' + verifile)
+ try {
+ return fs.writeFileAsync(verifile, '' + (+(new Date())))
+ } finally {
+ fixOwner.chownr.sync(cache, verifile)
+ }
+}
+
+module.exports.lastRun = lastRun
+function lastRun (cache) {
+ return fs.readFileAsync(
+ path.join(cache, '_lastverified'), 'utf8'
+ ).then(data => new Date(+data))
+}
+
+
+/***/ }),
+/* 306 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var concatMap = __webpack_require__(896);
+var balanced = __webpack_require__(621);
+
+module.exports = expandTop;
+
+var escSlash = '\0SLASH'+Math.random()+'\0';
+var escOpen = '\0OPEN'+Math.random()+'\0';
+var escClose = '\0CLOSE'+Math.random()+'\0';
+var escComma = '\0COMMA'+Math.random()+'\0';
+var escPeriod = '\0PERIOD'+Math.random()+'\0';
+
+function numeric(str) {
+ return parseInt(str, 10) == str
+ ? parseInt(str, 10)
+ : str.charCodeAt(0);
+}
+
+function escapeBraces(str) {
+ return str.split('\\\\').join(escSlash)
+ .split('\\{').join(escOpen)
+ .split('\\}').join(escClose)
+ .split('\\,').join(escComma)
+ .split('\\.').join(escPeriod);
+}
+
+function unescapeBraces(str) {
+ return str.split(escSlash).join('\\')
+ .split(escOpen).join('{')
+ .split(escClose).join('}')
+ .split(escComma).join(',')
+ .split(escPeriod).join('.');
+}
+
+
+// Basically just str.split(","), but handling cases
+// where we have nested braced sections, which should be
+// treated as individual members, like {a,{b,c},d}
+function parseCommaParts(str) {
+ if (!str)
+ return [''];
+
+ var parts = [];
+ var m = balanced('{', '}', str);
+
+ if (!m)
+ return str.split(',');
+
+ var pre = m.pre;
+ var body = m.body;
+ var post = m.post;
+ var p = pre.split(',');
+
+ p[p.length-1] += '{' + body + '}';
+ var postParts = parseCommaParts(post);
+ if (post.length) {
+ p[p.length-1] += postParts.shift();
+ p.push.apply(p, postParts);
+ }
+
+ parts.push.apply(parts, p);
+
+ return parts;
+}
+
+function expandTop(str) {
+ if (!str)
+ return [];
+
+ // I don't know why Bash 4.3 does this, but it does.
+ // Anything starting with {} will have the first two bytes preserved
+ // but *only* at the top level, so {},a}b will not expand to anything,
+ // but a{},b}c will be expanded to [a}c,abc].
+ // One could argue that this is a bug in Bash, but since the goal of
+ // this module is to match Bash's rules, we escape a leading {}
+ if (str.substr(0, 2) === '{}') {
+ str = '\\{\\}' + str.substr(2);
+ }
+
+ return expand(escapeBraces(str), true).map(unescapeBraces);
+}
+
+function identity(e) {
+ return e;
+}
+
+function embrace(str) {
+ return '{' + str + '}';
+}
+function isPadded(el) {
+ return /^-?0\d/.test(el);
+}
+
+function lte(i, y) {
+ return i <= y;
+}
+function gte(i, y) {
+ return i >= y;
+}
+
+function expand(str, isTop) {
+ var expansions = [];
+
+ var m = balanced('{', '}', str);
+ if (!m || /\$$/.test(m.pre)) return [str];
+
+ var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
+ var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
+ var isSequence = isNumericSequence || isAlphaSequence;
+ var isOptions = m.body.indexOf(',') >= 0;
+ if (!isSequence && !isOptions) {
+ // {a},b}
+ if (m.post.match(/,.*\}/)) {
+ str = m.pre + '{' + m.body + escClose + m.post;
+ return expand(str);
}
-};
-var CorsRule = {
- serializedName: "CorsRule",
- type: {
- name: "Composite",
- className: "CorsRule",
- modelProperties: {
- allowedOrigins: {
- xmlName: "AllowedOrigins",
- required: true,
- serializedName: "AllowedOrigins",
- type: {
- name: "String"
- }
- },
- allowedMethods: {
- xmlName: "AllowedMethods",
- required: true,
- serializedName: "AllowedMethods",
- type: {
- name: "String"
- }
- },
- allowedHeaders: {
- xmlName: "AllowedHeaders",
- required: true,
- serializedName: "AllowedHeaders",
- type: {
- name: "String"
- }
- },
- exposedHeaders: {
- xmlName: "ExposedHeaders",
- required: true,
- serializedName: "ExposedHeaders",
- type: {
- name: "String"
- }
- },
- maxAgeInSeconds: {
- xmlName: "MaxAgeInSeconds",
- required: true,
- serializedName: "MaxAgeInSeconds",
- constraints: {
- InclusiveMinimum: 0
- },
- type: {
- name: "Number"
- }
- }
- }
+ return [str];
+ }
+
+ var n;
+ if (isSequence) {
+ n = m.body.split(/\.\./);
+ } else {
+ n = parseCommaParts(m.body);
+ if (n.length === 1) {
+ // x{{a,b}}y ==> x{a}y x{b}y
+ n = expand(n[0], false).map(embrace);
+ if (n.length === 1) {
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+ return post.map(function(p) {
+ return m.pre + n[0] + p;
+ });
+ }
}
-};
-var FilterBlobItem = {
- xmlName: "Blob",
- serializedName: "FilterBlobItem",
- type: {
- name: "Composite",
- className: "FilterBlobItem",
- modelProperties: {
- name: {
- xmlName: "Name",
- required: true,
- serializedName: "Name",
- type: {
- name: "String"
- }
- },
- containerName: {
- xmlName: "ContainerName",
- required: true,
- serializedName: "ContainerName",
- type: {
- name: "String"
- }
- },
- tagValue: {
- xmlName: "TagValue",
- required: true,
- serializedName: "TagValue",
- type: {
- name: "String"
- }
- }
- }
+ }
+
+ // at this point, n is the parts, and we know it's not a comma set
+ // with a single entry.
+
+ // no need to expand pre, since it is guaranteed to be free of brace-sets
+ var pre = m.pre;
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+
+ var N;
+
+ if (isSequence) {
+ var x = numeric(n[0]);
+ var y = numeric(n[1]);
+ var width = Math.max(n[0].length, n[1].length)
+ var incr = n.length == 3
+ ? Math.abs(numeric(n[2]))
+ : 1;
+ var test = lte;
+ var reverse = y < x;
+ if (reverse) {
+ incr *= -1;
+ test = gte;
}
-};
-var FilterBlobSegment = {
- xmlName: "EnumerationResults",
- serializedName: "FilterBlobSegment",
- type: {
- name: "Composite",
- className: "FilterBlobSegment",
- modelProperties: {
- serviceEndpoint: {
- xmlIsAttribute: true,
- xmlName: "ServiceEndpoint",
- required: true,
- serializedName: "ServiceEndpoint",
- type: {
- name: "String"
- }
- },
- where: {
- xmlName: "Where",
- required: true,
- serializedName: "Where",
- type: {
- name: "String"
- }
- },
- blobs: {
- xmlIsWrapped: true,
- xmlName: "Blobs",
- xmlElementName: "Blob",
- required: true,
- serializedName: "Blobs",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "FilterBlobItem"
- }
- }
- }
- },
- continuationToken: {
- xmlName: "NextMarker",
- serializedName: "NextMarker",
- type: {
- name: "String"
- }
- }
+ var pad = n.some(isPadded);
+
+ N = [];
+
+ for (var i = x; test(i, y); i += incr) {
+ var c;
+ if (isAlphaSequence) {
+ c = String.fromCharCode(i);
+ if (c === '\\')
+ c = '';
+ } else {
+ c = String(i);
+ if (pad) {
+ var need = width - c.length;
+ if (need > 0) {
+ var z = new Array(need + 1).join('0');
+ if (i < 0)
+ c = '-' + z + c.slice(1);
+ else
+ c = z + c;
+ }
}
+ }
+ N.push(c);
}
-};
-var GeoReplication = {
- serializedName: "GeoReplication",
- type: {
- name: "Composite",
- className: "GeoReplication",
- modelProperties: {
- status: {
- xmlName: "Status",
- required: true,
- serializedName: "Status",
- type: {
- name: "String"
- }
- },
- lastSyncOn: {
- xmlName: "LastSyncTime",
- required: true,
- serializedName: "LastSyncTime",
- type: {
- name: "DateTimeRfc1123"
- }
- }
- }
+ } else {
+ N = concatMap(n, function(el) { return expand(el, false) });
+ }
+
+ for (var j = 0; j < N.length; j++) {
+ for (var k = 0; k < post.length; k++) {
+ var expansion = pre + N[j] + post[k];
+ if (!isTop || isSequence || expansion)
+ expansions.push(expansion);
}
-};
-var RetentionPolicy = {
- serializedName: "RetentionPolicy",
- type: {
- name: "Composite",
- className: "RetentionPolicy",
- modelProperties: {
- enabled: {
- xmlName: "Enabled",
- required: true,
- serializedName: "Enabled",
- type: {
- name: "Boolean"
- }
- },
- days: {
- xmlName: "Days",
- serializedName: "Days",
- constraints: {
- InclusiveMinimum: 1
- },
- type: {
- name: "Number"
- }
- }
- }
+ }
+
+ return expansions;
+}
+
+
+
+/***/ }),
+/* 307 */,
+/* 308 */,
+/* 309 */,
+/* 310 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const Range = __webpack_require__(124)
+const satisfies = (version, range, options) => {
+ try {
+ range = new Range(range, options)
+ } catch (er) {
+ return false
+ }
+ return range.test(version)
+}
+module.exports = satisfies
+
+
+/***/ }),
+/* 311 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
+const rm = __webpack_require__(974)
+const link = __webpack_require__(273)
+const mkdir = __webpack_require__(836)
+const binLink = __webpack_require__(834)
+
+exports = module.exports = {
+ rm: rm,
+ link: link.link,
+ linkIfExists: link.linkIfExists,
+ mkdir: mkdir,
+ binLink: binLink
+}
+
+
+/***/ }),
+/* 312 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+// Generated by CoffeeScript 1.12.7
+(function() {
+ var NodeType, WriterState, XMLDOMImplementation, XMLDocument, XMLDocumentCB, XMLStreamWriter, XMLStringWriter, assign, isFunction, ref;
+
+ ref = __webpack_require__(582), assign = ref.assign, isFunction = ref.isFunction;
+
+ XMLDOMImplementation = __webpack_require__(515);
+
+ XMLDocument = __webpack_require__(559);
+
+ XMLDocumentCB = __webpack_require__(768);
+
+ XMLStringWriter = __webpack_require__(347);
+
+ XMLStreamWriter = __webpack_require__(458);
+
+ NodeType = __webpack_require__(683);
+
+ WriterState = __webpack_require__(541);
+
+ module.exports.create = function(name, xmldec, doctype, options) {
+ var doc, root;
+ if (name == null) {
+ throw new Error("Root element needs a name.");
}
-};
-var Logging = {
- serializedName: "Logging",
- type: {
- name: "Composite",
- className: "Logging",
- modelProperties: {
- version: {
- xmlName: "Version",
- required: true,
- serializedName: "Version",
- type: {
- name: "String"
- }
- },
- deleteProperty: {
- xmlName: "Delete",
- required: true,
- serializedName: "Delete",
- type: {
- name: "Boolean"
- }
- },
- read: {
- xmlName: "Read",
- required: true,
- serializedName: "Read",
- type: {
- name: "Boolean"
- }
- },
- write: {
- xmlName: "Write",
- required: true,
- serializedName: "Write",
- type: {
- name: "Boolean"
- }
- },
- retentionPolicy: {
- xmlName: "RetentionPolicy",
- required: true,
- serializedName: "RetentionPolicy",
- type: {
- name: "Composite",
- className: "RetentionPolicy"
- }
- }
- }
+ options = assign({}, xmldec, doctype, options);
+ doc = new XMLDocument(options);
+ root = doc.element(name);
+ if (!options.headless) {
+ doc.declaration(options);
+ if ((options.pubID != null) || (options.sysID != null)) {
+ doc.dtd(options);
+ }
}
-};
-var Metrics = {
- serializedName: "Metrics",
- type: {
- name: "Composite",
- className: "Metrics",
- modelProperties: {
- version: {
- xmlName: "Version",
- serializedName: "Version",
- type: {
- name: "String"
- }
- },
- enabled: {
- xmlName: "Enabled",
- required: true,
- serializedName: "Enabled",
- type: {
- name: "Boolean"
- }
- },
- includeAPIs: {
- xmlName: "IncludeAPIs",
- serializedName: "IncludeAPIs",
- type: {
- name: "Boolean"
- }
- },
- retentionPolicy: {
- xmlName: "RetentionPolicy",
- serializedName: "RetentionPolicy",
- type: {
- name: "Composite",
- className: "RetentionPolicy"
- }
- }
- }
+ return root;
+ };
+
+ module.exports.begin = function(options, onData, onEnd) {
+ var ref1;
+ if (isFunction(options)) {
+ ref1 = [options, onData], onData = ref1[0], onEnd = ref1[1];
+ options = {};
}
-};
-var PageRange = {
- serializedName: "PageRange",
- type: {
- name: "Composite",
- className: "PageRange",
- modelProperties: {
- start: {
- xmlName: "Start",
- required: true,
- serializedName: "Start",
- type: {
- name: "Number"
- }
- },
- end: {
- xmlName: "End",
- required: true,
- serializedName: "End",
- type: {
- name: "Number"
- }
- }
+ if (onData) {
+ return new XMLDocumentCB(options, onData, onEnd);
+ } else {
+ return new XMLDocument(options);
+ }
+ };
+
+ module.exports.stringWriter = function(options) {
+ return new XMLStringWriter(options);
+ };
+
+ module.exports.streamWriter = function(stream, options) {
+ return new XMLStreamWriter(stream, options);
+ };
+
+ module.exports.implementation = new XMLDOMImplementation();
+
+ module.exports.nodeType = NodeType;
+
+ module.exports.writerState = WriterState;
+
+}).call(this);
+
+
+/***/ }),
+/* 313 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+var Buffer = __webpack_require__(215).Buffer;
+
+// NOTE: Due to 'stream' module being pretty large (~100Kb, significant in browser environments),
+// we opt to dependency-inject it instead of creating a hard dependency.
+module.exports = function(stream_module) {
+ var Transform = stream_module.Transform;
+
+ // == Encoder stream =======================================================
+
+ function IconvLiteEncoderStream(conv, options) {
+ this.conv = conv;
+ options = options || {};
+ options.decodeStrings = false; // We accept only strings, so we don't need to decode them.
+ Transform.call(this, options);
+ }
+
+ IconvLiteEncoderStream.prototype = Object.create(Transform.prototype, {
+ constructor: { value: IconvLiteEncoderStream }
+ });
+
+ IconvLiteEncoderStream.prototype._transform = function(chunk, encoding, done) {
+ if (typeof chunk != 'string')
+ return done(new Error("Iconv encoding stream needs strings as its input."));
+ try {
+ var res = this.conv.write(chunk);
+ if (res && res.length) this.push(res);
+ done();
+ }
+ catch (e) {
+ done(e);
}
}
-};
-var ClearRange = {
- serializedName: "ClearRange",
- type: {
- name: "Composite",
- className: "ClearRange",
- modelProperties: {
- start: {
- xmlName: "Start",
- required: true,
- serializedName: "Start",
- type: {
- name: "Number"
- }
- },
- end: {
- xmlName: "End",
- required: true,
- serializedName: "End",
- type: {
- name: "Number"
- }
- }
+
+ IconvLiteEncoderStream.prototype._flush = function(done) {
+ try {
+ var res = this.conv.end();
+ if (res && res.length) this.push(res);
+ done();
+ }
+ catch (e) {
+ done(e);
}
}
-};
-var PageList = {
- serializedName: "PageList",
- type: {
- name: "Composite",
- className: "PageList",
- modelProperties: {
- pageRange: {
- xmlName: "PageRange",
- xmlElementName: "PageRange",
- serializedName: "PageRange",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "PageRange"
- }
- }
- }
- },
- clearRange: {
- xmlName: "ClearRange",
- xmlElementName: "ClearRange",
- serializedName: "ClearRange",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "ClearRange"
- }
- }
- }
- }
+
+ IconvLiteEncoderStream.prototype.collect = function(cb) {
+ var chunks = [];
+ this.on('error', cb);
+ this.on('data', function(chunk) { chunks.push(chunk); });
+ this.on('end', function() {
+ cb(null, Buffer.concat(chunks));
+ });
+ return this;
+ }
+
+
+ // == Decoder stream =======================================================
+
+ function IconvLiteDecoderStream(conv, options) {
+ this.conv = conv;
+ options = options || {};
+ options.encoding = this.encoding = 'utf8'; // We output strings.
+ Transform.call(this, options);
+ }
+
+ IconvLiteDecoderStream.prototype = Object.create(Transform.prototype, {
+ constructor: { value: IconvLiteDecoderStream }
+ });
+
+ IconvLiteDecoderStream.prototype._transform = function(chunk, encoding, done) {
+ if (!Buffer.isBuffer(chunk) && !(chunk instanceof Uint8Array))
+ return done(new Error("Iconv decoding stream needs buffers as its input."));
+ try {
+ var res = this.conv.write(chunk);
+ if (res && res.length) this.push(res, this.encoding);
+ done();
+ }
+ catch (e) {
+ done(e);
}
}
-};
-var QueryFormat = {
- serializedName: "QueryFormat",
- type: {
- name: "Composite",
- className: "QueryFormat",
- modelProperties: {
- type: {
- xmlName: "Type",
- serializedName: "Type",
- type: {
- name: "Enum",
- allowedValues: [
- "delimited",
- "json"
- ]
- }
- },
- delimitedTextConfiguration: {
- xmlName: "DelimitedTextConfiguration",
- serializedName: "DelimitedTextConfiguration",
- type: {
- name: "Composite",
- className: "DelimitedTextConfiguration"
- }
- },
- jsonTextConfiguration: {
- xmlName: "JsonTextConfiguration",
- serializedName: "JsonTextConfiguration",
- type: {
- name: "Composite",
- className: "JsonTextConfiguration"
- }
- }
+
+ IconvLiteDecoderStream.prototype._flush = function(done) {
+ try {
+ var res = this.conv.end();
+ if (res && res.length) this.push(res, this.encoding);
+ done();
}
+ catch (e) {
+ done(e);
+ }
+ }
+
+ IconvLiteDecoderStream.prototype.collect = function(cb) {
+ var res = '';
+ this.on('error', cb);
+ this.on('data', function(chunk) { res += chunk; });
+ this.on('end', function() {
+ cb(null, res);
+ });
+ return this;
}
+
+ return {
+ IconvLiteEncoderStream: IconvLiteEncoderStream,
+ IconvLiteDecoderStream: IconvLiteDecoderStream,
+ };
};
-var QuerySerialization = {
- serializedName: "QuerySerialization",
- type: {
- name: "Composite",
- className: "QuerySerialization",
- modelProperties: {
- format: {
- xmlName: "Format",
- required: true,
- serializedName: "Format",
- type: {
- name: "Composite",
- className: "QueryFormat"
- }
- }
- }
+
+
+/***/ }),
+/* 314 */,
+/* 315 */
+/***/ (function(module) {
+
+"use strict";
+
+module.exports = function(Promise) {
+function returner() {
+ return this.value;
+}
+function thrower() {
+ throw this.reason;
+}
+
+Promise.prototype["return"] =
+Promise.prototype.thenReturn = function (value) {
+ if (value instanceof Promise) value.suppressUnhandledRejections();
+ return this._then(
+ returner, undefined, undefined, {value: value}, undefined);
+};
+
+Promise.prototype["throw"] =
+Promise.prototype.thenThrow = function (reason) {
+ return this._then(
+ thrower, undefined, undefined, {reason: reason}, undefined);
+};
+
+Promise.prototype.catchThrow = function (reason) {
+ if (arguments.length <= 1) {
+ return this._then(
+ undefined, thrower, undefined, {reason: reason}, undefined);
+ } else {
+ var _reason = arguments[1];
+ var handler = function() {throw _reason;};
+ return this.caught(reason, handler);
}
};
-var QueryRequest = {
- serializedName: "QueryRequest",
- type: {
- name: "Composite",
- className: "QueryRequest",
- modelProperties: {
- queryType: {
- xmlName: "QueryType",
- required: true,
- isConstant: true,
- serializedName: "QueryType",
- defaultValue: 'SQL',
- type: {
- name: "String"
- }
- },
- expression: {
- xmlName: "Expression",
- required: true,
- serializedName: "Expression",
- type: {
- name: "String"
- }
- },
- inputSerialization: {
- xmlName: "InputSerialization",
- serializedName: "InputSerialization",
- type: {
- name: "Composite",
- className: "QuerySerialization"
- }
- },
- outputSerialization: {
- xmlName: "OutputSerialization",
- serializedName: "OutputSerialization",
- type: {
- name: "Composite",
- className: "QuerySerialization"
- }
- }
- }
- }
-};
-var SignedIdentifier = {
- serializedName: "SignedIdentifier",
- type: {
- name: "Composite",
- className: "SignedIdentifier",
- modelProperties: {
- id: {
- xmlName: "Id",
- required: true,
- serializedName: "Id",
- type: {
- name: "String"
- }
- },
- accessPolicy: {
- xmlName: "AccessPolicy",
- required: true,
- serializedName: "AccessPolicy",
- type: {
- name: "Composite",
- className: "AccessPolicy"
- }
- }
- }
+
+Promise.prototype.catchReturn = function (value) {
+ if (arguments.length <= 1) {
+ if (value instanceof Promise) value.suppressUnhandledRejections();
+ return this._then(
+ undefined, returner, undefined, {value: value}, undefined);
+ } else {
+ var _value = arguments[1];
+ if (_value instanceof Promise) _value.suppressUnhandledRejections();
+ var handler = function() {return _value;};
+ return this.caught(value, handler);
}
};
-var StaticWebsite = {
- serializedName: "StaticWebsite",
- type: {
- name: "Composite",
- className: "StaticWebsite",
- modelProperties: {
- enabled: {
- xmlName: "Enabled",
- required: true,
- serializedName: "Enabled",
- type: {
- name: "Boolean"
- }
- },
- indexDocument: {
- xmlName: "IndexDocument",
- serializedName: "IndexDocument",
- type: {
- name: "String"
- }
- },
- errorDocument404Path: {
- xmlName: "ErrorDocument404Path",
- serializedName: "ErrorDocument404Path",
- type: {
- name: "String"
- }
- },
- defaultIndexDocumentPath: {
- xmlName: "DefaultIndexDocumentPath",
- serializedName: "DefaultIndexDocumentPath",
- type: {
- name: "String"
- }
- }
- }
- }
};
-var BlobServiceProperties = {
- xmlName: "StorageServiceProperties",
- serializedName: "BlobServiceProperties",
- type: {
- name: "Composite",
- className: "BlobServiceProperties",
- modelProperties: {
- blobAnalyticsLogging: {
- xmlName: "Logging",
- serializedName: "Logging",
- type: {
- name: "Composite",
- className: "Logging"
- }
- },
- hourMetrics: {
- xmlName: "HourMetrics",
- serializedName: "HourMetrics",
- type: {
- name: "Composite",
- className: "Metrics"
- }
- },
- minuteMetrics: {
- xmlName: "MinuteMetrics",
- serializedName: "MinuteMetrics",
- type: {
- name: "Composite",
- className: "Metrics"
- }
- },
- cors: {
- xmlIsWrapped: true,
- xmlName: "Cors",
- xmlElementName: "CorsRule",
- serializedName: "Cors",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "CorsRule"
- }
- }
- }
- },
- defaultServiceVersion: {
- xmlName: "DefaultServiceVersion",
- serializedName: "DefaultServiceVersion",
- type: {
- name: "String"
- }
- },
- deleteRetentionPolicy: {
- xmlName: "DeleteRetentionPolicy",
- serializedName: "DeleteRetentionPolicy",
- type: {
- name: "Composite",
- className: "RetentionPolicy"
- }
- },
- staticWebsite: {
- xmlName: "StaticWebsite",
- serializedName: "StaticWebsite",
- type: {
- name: "Composite",
- className: "StaticWebsite"
- }
- }
- }
+
+
+/***/ }),
+/* 316 */,
+/* 317 */
+/***/ (function(__unusedmodule, exports) {
+
+var undefined = (void 0); // Paranoia
+
+// Beyond this value, index getters/setters (i.e. array[0], array[1]) are so slow to
+// create, and consume so much memory, that the browser appears frozen.
+var MAX_ARRAY_LENGTH = 1e5;
+
+// Approximations of internal ECMAScript conversion functions
+var ECMAScript = (function() {
+ // Stash a copy in case other scripts modify these
+ var opts = Object.prototype.toString,
+ ophop = Object.prototype.hasOwnProperty;
+
+ return {
+ // Class returns internal [[Class]] property, used to avoid cross-frame instanceof issues:
+ Class: function(v) { return opts.call(v).replace(/^\[object *|\]$/g, ''); },
+ HasProperty: function(o, p) { return p in o; },
+ HasOwnProperty: function(o, p) { return ophop.call(o, p); },
+ IsCallable: function(o) { return typeof o === 'function'; },
+ ToInt32: function(v) { return v >> 0; },
+ ToUint32: function(v) { return v >>> 0; }
+ };
+}());
+
+// Snapshot intrinsics
+var LN2 = Math.LN2,
+ abs = Math.abs,
+ floor = Math.floor,
+ log = Math.log,
+ min = Math.min,
+ pow = Math.pow,
+ round = Math.round;
+
+// ES5: lock down object properties
+function configureProperties(obj) {
+ if (getOwnPropNames && defineProp) {
+ var props = getOwnPropNames(obj), i;
+ for (i = 0; i < props.length; i += 1) {
+ defineProp(obj, props[i], {
+ value: obj[props[i]],
+ writable: false,
+ enumerable: false,
+ configurable: false
+ });
}
-};
-var BlobServiceStatistics = {
- xmlName: "StorageServiceStats",
- serializedName: "BlobServiceStatistics",
- type: {
- name: "Composite",
- className: "BlobServiceStatistics",
- modelProperties: {
- geoReplication: {
- xmlName: "GeoReplication",
- serializedName: "GeoReplication",
- type: {
- name: "Composite",
- className: "GeoReplication"
- }
- }
- }
+ }
+}
+
+// emulate ES5 getter/setter API using legacy APIs
+// http://blogs.msdn.com/b/ie/archive/2010/09/07/transitioning-existing-code-to-the-es5-getter-setter-apis.aspx
+// (second clause tests for Object.defineProperty() in IE<9 that only supports extending DOM prototypes, but
+// note that IE<9 does not support __defineGetter__ or __defineSetter__ so it just renders the method harmless)
+var defineProp
+if (Object.defineProperty && (function() {
+ try {
+ Object.defineProperty({}, 'x', {});
+ return true;
+ } catch (e) {
+ return false;
+ }
+ })()) {
+ defineProp = Object.defineProperty;
+} else {
+ defineProp = function(o, p, desc) {
+ if (!o === Object(o)) throw new TypeError("Object.defineProperty called on non-object");
+ if (ECMAScript.HasProperty(desc, 'get') && Object.prototype.__defineGetter__) { Object.prototype.__defineGetter__.call(o, p, desc.get); }
+ if (ECMAScript.HasProperty(desc, 'set') && Object.prototype.__defineSetter__) { Object.prototype.__defineSetter__.call(o, p, desc.set); }
+ if (ECMAScript.HasProperty(desc, 'value')) { o[p] = desc.value; }
+ return o;
+ };
+}
+
+var getOwnPropNames = Object.getOwnPropertyNames || function (o) {
+ if (o !== Object(o)) throw new TypeError("Object.getOwnPropertyNames called on non-object");
+ var props = [], p;
+ for (p in o) {
+ if (ECMAScript.HasOwnProperty(o, p)) {
+ props.push(p);
}
+ }
+ return props;
};
-var ServiceSetPropertiesHeaders = {
- serializedName: "service-setproperties-headers",
- type: {
- name: "Composite",
- className: "ServiceSetPropertiesHeaders",
- modelProperties: {
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
+
+// ES5: Make obj[index] an alias for obj._getter(index)/obj._setter(index, value)
+// for index in 0 ... obj.length
+function makeArrayAccessors(obj) {
+ if (!defineProp) { return; }
+
+ if (obj.length > MAX_ARRAY_LENGTH) throw new RangeError("Array too large for polyfill");
+
+ function makeArrayAccessor(index) {
+ defineProp(obj, index, {
+ 'get': function() { return obj._getter(index); },
+ 'set': function(v) { obj._setter(index, v); },
+ enumerable: true,
+ configurable: false
+ });
+ }
+
+ var i;
+ for (i = 0; i < obj.length; i += 1) {
+ makeArrayAccessor(i);
+ }
+}
+
+// Internal conversion functions:
+// pack() - take a number (interpreted as Type), output a byte array
+// unpack() - take a byte array, output a Type-like number
+
+function as_signed(value, bits) { var s = 32 - bits; return (value << s) >> s; }
+function as_unsigned(value, bits) { var s = 32 - bits; return (value << s) >>> s; }
+
+function packI8(n) { return [n & 0xff]; }
+function unpackI8(bytes) { return as_signed(bytes[0], 8); }
+
+function packU8(n) { return [n & 0xff]; }
+function unpackU8(bytes) { return as_unsigned(bytes[0], 8); }
+
+function packU8Clamped(n) { n = round(Number(n)); return [n < 0 ? 0 : n > 0xff ? 0xff : n & 0xff]; }
+
+function packI16(n) { return [(n >> 8) & 0xff, n & 0xff]; }
+function unpackI16(bytes) { return as_signed(bytes[0] << 8 | bytes[1], 16); }
+
+function packU16(n) { return [(n >> 8) & 0xff, n & 0xff]; }
+function unpackU16(bytes) { return as_unsigned(bytes[0] << 8 | bytes[1], 16); }
+
+function packI32(n) { return [(n >> 24) & 0xff, (n >> 16) & 0xff, (n >> 8) & 0xff, n & 0xff]; }
+function unpackI32(bytes) { return as_signed(bytes[0] << 24 | bytes[1] << 16 | bytes[2] << 8 | bytes[3], 32); }
+
+function packU32(n) { return [(n >> 24) & 0xff, (n >> 16) & 0xff, (n >> 8) & 0xff, n & 0xff]; }
+function unpackU32(bytes) { return as_unsigned(bytes[0] << 24 | bytes[1] << 16 | bytes[2] << 8 | bytes[3], 32); }
+
+function packIEEE754(v, ebits, fbits) {
+
+ var bias = (1 << (ebits - 1)) - 1,
+ s, e, f, ln,
+ i, bits, str, bytes;
+
+ function roundToEven(n) {
+ var w = floor(n), f = n - w;
+ if (f < 0.5)
+ return w;
+ if (f > 0.5)
+ return w + 1;
+ return w % 2 ? w + 1 : w;
+ }
+
+ // Compute sign, exponent, fraction
+ if (v !== v) {
+ // NaN
+ // http://dev.w3.org/2006/webapi/WebIDL/#es-type-mapping
+ e = (1 << ebits) - 1; f = pow(2, fbits - 1); s = 0;
+ } else if (v === Infinity || v === -Infinity) {
+ e = (1 << ebits) - 1; f = 0; s = (v < 0) ? 1 : 0;
+ } else if (v === 0) {
+ e = 0; f = 0; s = (1 / v === -Infinity) ? 1 : 0;
+ } else {
+ s = v < 0;
+ v = abs(v);
+
+ if (v >= pow(2, 1 - bias)) {
+ e = min(floor(log(v) / LN2), 1023);
+ f = roundToEven(v / pow(2, e) * pow(2, fbits));
+ if (f / pow(2, fbits) >= 2) {
+ e = e + 1;
+ f = 1;
+ }
+ if (e > bias) {
+ // Overflow
+ e = (1 << ebits) - 1;
+ f = 0;
+ } else {
+ // Normalized
+ e = e + bias;
+ f = f - pow(2, fbits);
+ }
+ } else {
+ // Denormalized
+ e = 0;
+ f = roundToEven(v / pow(2, 1 - bias - fbits));
}
-};
-var ServiceGetPropertiesHeaders = {
- serializedName: "service-getproperties-headers",
- type: {
- name: "Composite",
- className: "ServiceGetPropertiesHeaders",
- modelProperties: {
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
+ }
+
+ // Pack sign, exponent, fraction
+ bits = [];
+ for (i = fbits; i; i -= 1) { bits.push(f % 2 ? 1 : 0); f = floor(f / 2); }
+ for (i = ebits; i; i -= 1) { bits.push(e % 2 ? 1 : 0); e = floor(e / 2); }
+ bits.push(s ? 1 : 0);
+ bits.reverse();
+ str = bits.join('');
+
+ // Bits to bytes
+ bytes = [];
+ while (str.length) {
+ bytes.push(parseInt(str.substring(0, 8), 2));
+ str = str.substring(8);
+ }
+ return bytes;
+}
+
+function unpackIEEE754(bytes, ebits, fbits) {
+
+ // Bytes to bits
+ var bits = [], i, j, b, str,
+ bias, s, e, f;
+
+ for (i = bytes.length; i; i -= 1) {
+ b = bytes[i - 1];
+ for (j = 8; j; j -= 1) {
+ bits.push(b % 2 ? 1 : 0); b = b >> 1;
}
-};
-var ServiceGetStatisticsHeaders = {
- serializedName: "service-getstatistics-headers",
- type: {
- name: "Composite",
- className: "ServiceGetStatisticsHeaders",
- modelProperties: {
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
+ }
+ bits.reverse();
+ str = bits.join('');
+
+ // Unpack sign, exponent, fraction
+ bias = (1 << (ebits - 1)) - 1;
+ s = parseInt(str.substring(0, 1), 2) ? -1 : 1;
+ e = parseInt(str.substring(1, 1 + ebits), 2);
+ f = parseInt(str.substring(1 + ebits), 2);
+
+ // Produce number
+ if (e === (1 << ebits) - 1) {
+ return f !== 0 ? NaN : s * Infinity;
+ } else if (e > 0) {
+ // Normalized
+ return s * pow(2, e - bias) * (1 + f / pow(2, fbits));
+ } else if (f !== 0) {
+ // Denormalized
+ return s * pow(2, -(bias - 1)) * (f / pow(2, fbits));
+ } else {
+ return s < 0 ? -0 : 0;
+ }
+}
+
+function unpackF64(b) { return unpackIEEE754(b, 11, 52); }
+function packF64(v) { return packIEEE754(v, 11, 52); }
+function unpackF32(b) { return unpackIEEE754(b, 8, 23); }
+function packF32(v) { return packIEEE754(v, 8, 23); }
+
+
+//
+// 3 The ArrayBuffer Type
+//
+
+(function() {
+
+ /** @constructor */
+ var ArrayBuffer = function ArrayBuffer(length) {
+ length = ECMAScript.ToInt32(length);
+ if (length < 0) throw new RangeError('ArrayBuffer size is not a small enough positive integer');
+
+ this.byteLength = length;
+ this._bytes = [];
+ this._bytes.length = length;
+
+ var i;
+ for (i = 0; i < this.byteLength; i += 1) {
+ this._bytes[i] = 0;
}
-};
-var ServiceListContainersSegmentHeaders = {
- serializedName: "service-listcontainerssegment-headers",
- type: {
- name: "Composite",
- className: "ServiceListContainersSegmentHeaders",
- modelProperties: {
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
+
+ configureProperties(this);
+ };
+
+ exports.ArrayBuffer = exports.ArrayBuffer || ArrayBuffer;
+
+ //
+ // 4 The ArrayBufferView Type
+ //
+
+ // NOTE: this constructor is not exported
+ /** @constructor */
+ var ArrayBufferView = function ArrayBufferView() {
+ //this.buffer = null;
+ //this.byteOffset = 0;
+ //this.byteLength = 0;
+ };
+
+ //
+ // 5 The Typed Array View Types
+ //
+
+ function makeConstructor(bytesPerElement, pack, unpack) {
+ // Each TypedArray type requires a distinct constructor instance with
+ // identical logic, which this produces.
+
+ var ctor;
+ ctor = function(buffer, byteOffset, length) {
+ var array, sequence, i, s;
+
+ if (!arguments.length || typeof arguments[0] === 'number') {
+ // Constructor(unsigned long length)
+ this.length = ECMAScript.ToInt32(arguments[0]);
+ if (length < 0) throw new RangeError('ArrayBufferView size is not a small enough positive integer');
+
+ this.byteLength = this.length * this.BYTES_PER_ELEMENT;
+ this.buffer = new ArrayBuffer(this.byteLength);
+ this.byteOffset = 0;
+ } else if (typeof arguments[0] === 'object' && arguments[0].constructor === ctor) {
+ // Constructor(TypedArray array)
+ array = arguments[0];
+
+ this.length = array.length;
+ this.byteLength = this.length * this.BYTES_PER_ELEMENT;
+ this.buffer = new ArrayBuffer(this.byteLength);
+ this.byteOffset = 0;
+
+ for (i = 0; i < this.length; i += 1) {
+ this._setter(i, array._getter(i));
}
- }
-};
-var ServiceGetUserDelegationKeyHeaders = {
- serializedName: "service-getuserdelegationkey-headers",
- type: {
- name: "Composite",
- className: "ServiceGetUserDelegationKeyHeaders",
- modelProperties: {
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
+ } else if (typeof arguments[0] === 'object' &&
+ !(arguments[0] instanceof ArrayBuffer || ECMAScript.Class(arguments[0]) === 'ArrayBuffer')) {
+ // Constructor(sequence array)
+ sequence = arguments[0];
+
+ this.length = ECMAScript.ToUint32(sequence.length);
+ this.byteLength = this.length * this.BYTES_PER_ELEMENT;
+ this.buffer = new ArrayBuffer(this.byteLength);
+ this.byteOffset = 0;
+
+ for (i = 0; i < this.length; i += 1) {
+ s = sequence[i];
+ this._setter(i, Number(s));
}
- }
-};
-var ServiceGetAccountInfoHeaders = {
- serializedName: "service-getaccountinfo-headers",
- type: {
- name: "Composite",
- className: "ServiceGetAccountInfoHeaders",
- modelProperties: {
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- skuName: {
- serializedName: "x-ms-sku-name",
- type: {
- name: "Enum",
- allowedValues: [
- "Standard_LRS",
- "Standard_GRS",
- "Standard_RAGRS",
- "Standard_ZRS",
- "Premium_LRS"
- ]
- }
- },
- accountKind: {
- serializedName: "x-ms-account-kind",
- type: {
- name: "Enum",
- allowedValues: [
- "Storage",
- "BlobStorage",
- "StorageV2",
- "FileStorage",
- "BlockBlobStorage"
- ]
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
+ } else if (typeof arguments[0] === 'object' &&
+ (arguments[0] instanceof ArrayBuffer || ECMAScript.Class(arguments[0]) === 'ArrayBuffer')) {
+ // Constructor(ArrayBuffer buffer,
+ // optional unsigned long byteOffset, optional unsigned long length)
+ this.buffer = buffer;
+
+ this.byteOffset = ECMAScript.ToUint32(byteOffset);
+ if (this.byteOffset > this.buffer.byteLength) {
+ throw new RangeError("byteOffset out of range");
}
- }
-};
-var ServiceSubmitBatchHeaders = {
- serializedName: "service-submitbatch-headers",
- type: {
- name: "Composite",
- className: "ServiceSubmitBatchHeaders",
- modelProperties: {
- contentType: {
- serializedName: "content-type",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
+
+ if (this.byteOffset % this.BYTES_PER_ELEMENT) {
+ // The given byteOffset must be a multiple of the element
+ // size of the specific type, otherwise an exception is raised.
+ throw new RangeError("ArrayBuffer length minus the byteOffset is not a multiple of the element size.");
}
- }
-};
-var ServiceFilterBlobsHeaders = {
- serializedName: "service-filterblobs-headers",
- type: {
- name: "Composite",
- className: "ServiceFilterBlobsHeaders",
- modelProperties: {
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
+
+ if (arguments.length < 3) {
+ this.byteLength = this.buffer.byteLength - this.byteOffset;
+
+ if (this.byteLength % this.BYTES_PER_ELEMENT) {
+ throw new RangeError("length of buffer minus byteOffset not a multiple of the element size");
+ }
+ this.length = this.byteLength / this.BYTES_PER_ELEMENT;
+ } else {
+ this.length = ECMAScript.ToUint32(length);
+ this.byteLength = this.length * this.BYTES_PER_ELEMENT;
}
- }
-};
-var ContainerCreateHeaders = {
- serializedName: "container-create-headers",
- type: {
- name: "Composite",
- className: "ContainerCreateHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
+
+ if ((this.byteOffset + this.byteLength) > this.buffer.byteLength) {
+ throw new RangeError("byteOffset and length reference an area beyond the end of the buffer");
}
- }
-};
-var ContainerGetPropertiesHeaders = {
- serializedName: "container-getproperties-headers",
- type: {
- name: "Composite",
- className: "ContainerGetPropertiesHeaders",
- modelProperties: {
- metadata: {
- serializedName: "x-ms-meta",
- type: {
- name: "Dictionary",
- value: {
- type: {
- name: "String"
- }
- }
- },
- headerCollectionPrefix: "x-ms-meta-"
- },
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- leaseDuration: {
- serializedName: "x-ms-lease-duration",
- type: {
- name: "Enum",
- allowedValues: [
- "infinite",
- "fixed"
- ]
- }
- },
- leaseState: {
- serializedName: "x-ms-lease-state",
- type: {
- name: "Enum",
- allowedValues: [
- "available",
- "leased",
- "expired",
- "breaking",
- "broken"
- ]
- }
- },
- leaseStatus: {
- serializedName: "x-ms-lease-status",
- type: {
- name: "Enum",
- allowedValues: [
- "locked",
- "unlocked"
- ]
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- blobPublicAccess: {
- serializedName: "x-ms-blob-public-access",
- type: {
- name: "String"
- }
- },
- hasImmutabilityPolicy: {
- serializedName: "x-ms-has-immutability-policy",
- type: {
- name: "Boolean"
- }
- },
- hasLegalHold: {
- serializedName: "x-ms-has-legal-hold",
- type: {
- name: "Boolean"
- }
- },
- defaultEncryptionScope: {
- serializedName: "x-ms-default-encryption-scope",
- type: {
- name: "String"
- }
- },
- denyEncryptionScopeOverride: {
- serializedName: "x-ms-deny-encryption-scope-override",
- type: {
- name: "Boolean"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
+ } else {
+ throw new TypeError("Unexpected argument type(s)");
+ }
+
+ this.constructor = ctor;
+
+ configureProperties(this);
+ makeArrayAccessors(this);
+ };
+
+ ctor.prototype = new ArrayBufferView();
+ ctor.prototype.BYTES_PER_ELEMENT = bytesPerElement;
+ ctor.prototype._pack = pack;
+ ctor.prototype._unpack = unpack;
+ ctor.BYTES_PER_ELEMENT = bytesPerElement;
+
+ // getter type (unsigned long index);
+ ctor.prototype._getter = function(index) {
+ if (arguments.length < 1) throw new SyntaxError("Not enough arguments");
+
+ index = ECMAScript.ToUint32(index);
+ if (index >= this.length) {
+ return undefined;
+ }
+
+ var bytes = [], i, o;
+ for (i = 0, o = this.byteOffset + index * this.BYTES_PER_ELEMENT;
+ i < this.BYTES_PER_ELEMENT;
+ i += 1, o += 1) {
+ bytes.push(this.buffer._bytes[o]);
+ }
+ return this._unpack(bytes);
+ };
+
+ // NONSTANDARD: convenience alias for getter: type get(unsigned long index);
+ ctor.prototype.get = ctor.prototype._getter;
+
+ // setter void (unsigned long index, type value);
+ ctor.prototype._setter = function(index, value) {
+ if (arguments.length < 2) throw new SyntaxError("Not enough arguments");
+
+ index = ECMAScript.ToUint32(index);
+ if (index >= this.length) {
+ return undefined;
+ }
+
+ var bytes = this._pack(value), i, o;
+ for (i = 0, o = this.byteOffset + index * this.BYTES_PER_ELEMENT;
+ i < this.BYTES_PER_ELEMENT;
+ i += 1, o += 1) {
+ this.buffer._bytes[o] = bytes[i];
+ }
+ };
+
+ // void set(TypedArray array, optional unsigned long offset);
+ // void set(sequence array, optional unsigned long offset);
+ ctor.prototype.set = function(index, value) {
+ if (arguments.length < 1) throw new SyntaxError("Not enough arguments");
+ var array, sequence, offset, len,
+ i, s, d,
+ byteOffset, byteLength, tmp;
+
+ if (typeof arguments[0] === 'object' && arguments[0].constructor === this.constructor) {
+ // void set(TypedArray array, optional unsigned long offset);
+ array = arguments[0];
+ offset = ECMAScript.ToUint32(arguments[1]);
+
+ if (offset + array.length > this.length) {
+ throw new RangeError("Offset plus length of array is out of range");
+ }
+
+ byteOffset = this.byteOffset + offset * this.BYTES_PER_ELEMENT;
+ byteLength = array.length * this.BYTES_PER_ELEMENT;
+
+ if (array.buffer === this.buffer) {
+ tmp = [];
+ for (i = 0, s = array.byteOffset; i < byteLength; i += 1, s += 1) {
+ tmp[i] = array.buffer._bytes[s];
+ }
+ for (i = 0, d = byteOffset; i < byteLength; i += 1, d += 1) {
+ this.buffer._bytes[d] = tmp[i];
+ }
+ } else {
+ for (i = 0, s = array.byteOffset, d = byteOffset;
+ i < byteLength; i += 1, s += 1, d += 1) {
+ this.buffer._bytes[d] = array.buffer._bytes[s];
+ }
+ }
+ } else if (typeof arguments[0] === 'object' && typeof arguments[0].length !== 'undefined') {
+ // void set(sequence array, optional unsigned long offset);
+ sequence = arguments[0];
+ len = ECMAScript.ToUint32(sequence.length);
+ offset = ECMAScript.ToUint32(arguments[1]);
+
+ if (offset + len > this.length) {
+ throw new RangeError("Offset plus length of array is out of range");
+ }
+
+ for (i = 0; i < len; i += 1) {
+ s = sequence[i];
+ this._setter(offset + i, Number(s));
}
+ } else {
+ throw new TypeError("Unexpected argument type(s)");
+ }
+ };
+
+ // TypedArray subarray(long begin, optional long end);
+ ctor.prototype.subarray = function(start, end) {
+ function clamp(v, min, max) { return v < min ? min : v > max ? max : v; }
+
+ start = ECMAScript.ToInt32(start);
+ end = ECMAScript.ToInt32(end);
+
+ if (arguments.length < 1) { start = 0; }
+ if (arguments.length < 2) { end = this.length; }
+
+ if (start < 0) { start = this.length + start; }
+ if (end < 0) { end = this.length + end; }
+
+ start = clamp(start, 0, this.length);
+ end = clamp(end, 0, this.length);
+
+ var len = end - start;
+ if (len < 0) {
+ len = 0;
+ }
+
+ return new this.constructor(
+ this.buffer, this.byteOffset + start * this.BYTES_PER_ELEMENT, len);
+ };
+
+ return ctor;
+ }
+
+ var Int8Array = makeConstructor(1, packI8, unpackI8);
+ var Uint8Array = makeConstructor(1, packU8, unpackU8);
+ var Uint8ClampedArray = makeConstructor(1, packU8Clamped, unpackU8);
+ var Int16Array = makeConstructor(2, packI16, unpackI16);
+ var Uint16Array = makeConstructor(2, packU16, unpackU16);
+ var Int32Array = makeConstructor(4, packI32, unpackI32);
+ var Uint32Array = makeConstructor(4, packU32, unpackU32);
+ var Float32Array = makeConstructor(4, packF32, unpackF32);
+ var Float64Array = makeConstructor(8, packF64, unpackF64);
+
+ exports.Int8Array = exports.Int8Array || Int8Array;
+ exports.Uint8Array = exports.Uint8Array || Uint8Array;
+ exports.Uint8ClampedArray = exports.Uint8ClampedArray || Uint8ClampedArray;
+ exports.Int16Array = exports.Int16Array || Int16Array;
+ exports.Uint16Array = exports.Uint16Array || Uint16Array;
+ exports.Int32Array = exports.Int32Array || Int32Array;
+ exports.Uint32Array = exports.Uint32Array || Uint32Array;
+ exports.Float32Array = exports.Float32Array || Float32Array;
+ exports.Float64Array = exports.Float64Array || Float64Array;
+}());
+
+//
+// 6 The DataView View Type
+//
+
+(function() {
+ function r(array, index) {
+ return ECMAScript.IsCallable(array.get) ? array.get(index) : array[index];
+ }
+
+ var IS_BIG_ENDIAN = (function() {
+ var u16array = new(exports.Uint16Array)([0x1234]),
+ u8array = new(exports.Uint8Array)(u16array.buffer);
+ return r(u8array, 0) === 0x12;
+ }());
+
+ // Constructor(ArrayBuffer buffer,
+ // optional unsigned long byteOffset,
+ // optional unsigned long byteLength)
+ /** @constructor */
+ var DataView = function DataView(buffer, byteOffset, byteLength) {
+ if (arguments.length === 0) {
+ buffer = new exports.ArrayBuffer(0);
+ } else if (!(buffer instanceof exports.ArrayBuffer || ECMAScript.Class(buffer) === 'ArrayBuffer')) {
+ throw new TypeError("TypeError");
+ }
+
+ this.buffer = buffer || new exports.ArrayBuffer(0);
+
+ this.byteOffset = ECMAScript.ToUint32(byteOffset);
+ if (this.byteOffset > this.buffer.byteLength) {
+ throw new RangeError("byteOffset out of range");
+ }
+
+ if (arguments.length < 3) {
+ this.byteLength = this.buffer.byteLength - this.byteOffset;
+ } else {
+ this.byteLength = ECMAScript.ToUint32(byteLength);
+ }
+
+ if ((this.byteOffset + this.byteLength) > this.buffer.byteLength) {
+ throw new RangeError("byteOffset and length reference an area beyond the end of the buffer");
+ }
+
+ configureProperties(this);
+ };
+
+ function makeGetter(arrayType) {
+ return function(byteOffset, littleEndian) {
+
+ byteOffset = ECMAScript.ToUint32(byteOffset);
+
+ if (byteOffset + arrayType.BYTES_PER_ELEMENT > this.byteLength) {
+ throw new RangeError("Array index out of range");
+ }
+ byteOffset += this.byteOffset;
+
+ var uint8Array = new exports.Uint8Array(this.buffer, byteOffset, arrayType.BYTES_PER_ELEMENT),
+ bytes = [], i;
+ for (i = 0; i < arrayType.BYTES_PER_ELEMENT; i += 1) {
+ bytes.push(r(uint8Array, i));
+ }
+
+ if (Boolean(littleEndian) === Boolean(IS_BIG_ENDIAN)) {
+ bytes.reverse();
+ }
+
+ return r(new arrayType(new exports.Uint8Array(bytes).buffer), 0);
+ };
+ }
+
+ DataView.prototype.getUint8 = makeGetter(exports.Uint8Array);
+ DataView.prototype.getInt8 = makeGetter(exports.Int8Array);
+ DataView.prototype.getUint16 = makeGetter(exports.Uint16Array);
+ DataView.prototype.getInt16 = makeGetter(exports.Int16Array);
+ DataView.prototype.getUint32 = makeGetter(exports.Uint32Array);
+ DataView.prototype.getInt32 = makeGetter(exports.Int32Array);
+ DataView.prototype.getFloat32 = makeGetter(exports.Float32Array);
+ DataView.prototype.getFloat64 = makeGetter(exports.Float64Array);
+
+ function makeSetter(arrayType) {
+ return function(byteOffset, value, littleEndian) {
+
+ byteOffset = ECMAScript.ToUint32(byteOffset);
+ if (byteOffset + arrayType.BYTES_PER_ELEMENT > this.byteLength) {
+ throw new RangeError("Array index out of range");
+ }
+
+ // Get bytes
+ var typeArray = new arrayType([value]),
+ byteArray = new exports.Uint8Array(typeArray.buffer),
+ bytes = [], i, byteView;
+
+ for (i = 0; i < arrayType.BYTES_PER_ELEMENT; i += 1) {
+ bytes.push(r(byteArray, i));
+ }
+
+ // Flip if necessary
+ if (Boolean(littleEndian) === Boolean(IS_BIG_ENDIAN)) {
+ bytes.reverse();
+ }
+
+ // Write them
+ byteView = new exports.Uint8Array(this.buffer, byteOffset, arrayType.BYTES_PER_ELEMENT);
+ byteView.set(bytes);
+ };
+ }
+
+ DataView.prototype.setUint8 = makeSetter(exports.Uint8Array);
+ DataView.prototype.setInt8 = makeSetter(exports.Int8Array);
+ DataView.prototype.setUint16 = makeSetter(exports.Uint16Array);
+ DataView.prototype.setInt16 = makeSetter(exports.Int16Array);
+ DataView.prototype.setUint32 = makeSetter(exports.Uint32Array);
+ DataView.prototype.setInt32 = makeSetter(exports.Int32Array);
+ DataView.prototype.setFloat32 = makeSetter(exports.Float32Array);
+ DataView.prototype.setFloat64 = makeSetter(exports.Float64Array);
+
+ exports.DataView = exports.DataView || DataView;
+
+}());
+
+
+/***/ }),
+/* 318 */
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=BatchObserverResult.js.map
+
+/***/ }),
+/* 319 */,
+/* 320 */,
+/* 321 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+module.exports = function(
+ Promise, PromiseArray, tryConvertToPromise, apiRejection) {
+var util = __webpack_require__(248);
+var isObject = util.isObject;
+var es5 = __webpack_require__(883);
+var Es6Map;
+if (typeof Map === "function") Es6Map = Map;
+
+var mapToEntries = (function() {
+ var index = 0;
+ var size = 0;
+
+ function extractEntry(value, key) {
+ this[index] = value;
+ this[index + size] = key;
+ index++;
+ }
+
+ return function mapToEntries(map) {
+ size = map.size;
+ index = 0;
+ var ret = new Array(map.size * 2);
+ map.forEach(extractEntry, ret);
+ return ret;
+ };
+})();
+
+var entriesToMap = function(entries) {
+ var ret = new Es6Map();
+ var length = entries.length / 2 | 0;
+ for (var i = 0; i < length; ++i) {
+ var key = entries[length + i];
+ var value = entries[i];
+ ret.set(key, value);
}
+ return ret;
};
-var ContainerDeleteHeaders = {
- serializedName: "container-delete-headers",
- type: {
- name: "Composite",
- className: "ContainerDeleteHeaders",
- modelProperties: {
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
+
+function PropertiesPromiseArray(obj) {
+ var isMap = false;
+ var entries;
+ if (Es6Map !== undefined && obj instanceof Es6Map) {
+ entries = mapToEntries(obj);
+ isMap = true;
+ } else {
+ var keys = es5.keys(obj);
+ var len = keys.length;
+ entries = new Array(len * 2);
+ for (var i = 0; i < len; ++i) {
+ var key = keys[i];
+ entries[i] = obj[key];
+ entries[i + len] = key;
}
}
-};
-var ContainerSetMetadataHeaders = {
- serializedName: "container-setmetadata-headers",
- type: {
- name: "Composite",
- className: "ContainerSetMetadataHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
+ this.constructor$(entries);
+ this._isMap = isMap;
+ this._init$(undefined, isMap ? -6 : -3);
+}
+util.inherits(PropertiesPromiseArray, PromiseArray);
+
+PropertiesPromiseArray.prototype._init = function () {};
+
+PropertiesPromiseArray.prototype._promiseFulfilled = function (value, index) {
+ this._values[index] = value;
+ var totalResolved = ++this._totalResolved;
+ if (totalResolved >= this._length) {
+ var val;
+ if (this._isMap) {
+ val = entriesToMap(this._values);
+ } else {
+ val = {};
+ var keyOffset = this.length();
+ for (var i = 0, len = this.length(); i < len; ++i) {
+ val[this._values[i + keyOffset]] = this._values[i];
}
}
+ this._resolve(val);
+ return true;
}
+ return false;
};
-var ContainerGetAccessPolicyHeaders = {
- serializedName: "container-getaccesspolicy-headers",
- type: {
- name: "Composite",
- className: "ContainerGetAccessPolicyHeaders",
- modelProperties: {
- blobPublicAccess: {
- serializedName: "x-ms-blob-public-access",
- type: {
- name: "String"
- }
- },
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
- }
+
+PropertiesPromiseArray.prototype.shouldCopyValues = function () {
+ return false;
};
-var ContainerSetAccessPolicyHeaders = {
- serializedName: "container-setaccesspolicy-headers",
- type: {
- name: "Composite",
- className: "ContainerSetAccessPolicyHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
- }
+
+PropertiesPromiseArray.prototype.getActualLength = function (len) {
+ return len >> 1;
};
-var ContainerRestoreHeaders = {
- serializedName: "container-restore-headers",
- type: {
- name: "Composite",
- className: "ContainerRestoreHeaders",
- modelProperties: {
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
+
+function props(promises) {
+ var ret;
+ var castValue = tryConvertToPromise(promises);
+
+ if (!isObject(castValue)) {
+ return apiRejection("cannot await properties of a non-object\u000a\u000a See http://goo.gl/MqrFmX\u000a");
+ } else if (castValue instanceof Promise) {
+ ret = castValue._then(
+ Promise.props, undefined, undefined, undefined, undefined);
+ } else {
+ ret = new PropertiesPromiseArray(castValue).promise();
}
-};
-var ContainerAcquireLeaseHeaders = {
- serializedName: "container-acquirelease-headers",
- type: {
- name: "Composite",
- className: "ContainerAcquireLeaseHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- leaseId: {
- serializedName: "x-ms-lease-id",
- type: {
- name: "String"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
+
+ if (castValue instanceof Promise) {
+ ret._propagateFrom(castValue, 2);
}
+ return ret;
+}
+
+Promise.prototype.props = function () {
+ return props(this);
};
-var ContainerReleaseLeaseHeaders = {
- serializedName: "container-releaselease-headers",
- type: {
- name: "Composite",
- className: "ContainerReleaseLeaseHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
- }
+
+Promise.props = function (promises) {
+ return props(promises);
};
-var ContainerRenewLeaseHeaders = {
- serializedName: "container-renewlease-headers",
- type: {
- name: "Composite",
- className: "ContainerRenewLeaseHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- leaseId: {
- serializedName: "x-ms-lease-id",
- type: {
- name: "String"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
- }
};
-var ContainerBreakLeaseHeaders = {
- serializedName: "container-breaklease-headers",
- type: {
- name: "Composite",
- className: "ContainerBreakLeaseHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- leaseTime: {
- serializedName: "x-ms-lease-time",
- type: {
- name: "Number"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
+
+
+/***/ }),
+/* 322 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = uidNumber
+
+// This module calls into get-uid-gid.js, which sets the
+// uid and gid to the supplied argument, in order to find out their
+// numeric value. This can't be done in the main node process,
+// because otherwise node would be running as that user from this
+// point on.
+
+var child_process = __webpack_require__(129)
+ , path = __webpack_require__(622)
+ , uidSupport = process.getuid && process.setuid
+ , uidCache = {}
+ , gidCache = {}
+
+function uidNumber (uid, gid, cb) {
+ if (!uidSupport) return cb()
+ if (typeof cb !== "function") cb = gid, gid = null
+ if (typeof cb !== "function") cb = uid, uid = null
+ if (gid == null) gid = process.getgid()
+ if (uid == null) uid = process.getuid()
+ if (!isNaN(gid)) gid = gidCache[gid] = +gid
+ if (!isNaN(uid)) uid = uidCache[uid] = +uid
+
+ if (uidCache.hasOwnProperty(uid)) uid = uidCache[uid]
+ if (gidCache.hasOwnProperty(gid)) gid = gidCache[gid]
+
+ if (typeof gid === "number" && typeof uid === "number") {
+ return process.nextTick(cb.bind(null, null, uid, gid))
+ }
+
+ var getter = __webpack_require__.ab + "get-uid-gid.js"
+
+ child_process.execFile( process.execPath
+ , [getter, uid, gid]
+ , function (code, out, stderr) {
+ if (code) {
+ var er = new Error("could not get uid/gid\n" + stderr)
+ er.code = code
+ return cb(er)
}
-};
-var ContainerChangeLeaseHeaders = {
- serializedName: "container-changelease-headers",
- type: {
- name: "Composite",
- className: "ContainerChangeLeaseHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- leaseId: {
- serializedName: "x-ms-lease-id",
- type: {
- name: "String"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
+
+ try {
+ out = JSON.parse(out+"")
+ } catch (ex) {
+ return cb(ex)
}
-};
-var ContainerListBlobFlatSegmentHeaders = {
- serializedName: "container-listblobflatsegment-headers",
- type: {
- name: "Composite",
- className: "ContainerListBlobFlatSegmentHeaders",
- modelProperties: {
- contentType: {
- serializedName: "content-type",
- type: {
- name: "String"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
+
+ if (out.error) {
+ var er = new Error(out.error)
+ er.errno = out.errno
+ return cb(er)
}
-};
-var ContainerListBlobHierarchySegmentHeaders = {
- serializedName: "container-listblobhierarchysegment-headers",
- type: {
- name: "Composite",
- className: "ContainerListBlobHierarchySegmentHeaders",
- modelProperties: {
- contentType: {
- serializedName: "content-type",
- type: {
- name: "String"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
+
+ if (isNaN(out.uid) || isNaN(out.gid)) return cb(new Error(
+ "Could not get uid/gid: "+JSON.stringify(out)))
+
+ cb(null, uidCache[uid] = +out.uid, gidCache[gid] = +out.gid)
+ })
+}
+
+
+/***/ }),
+/* 323 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const outside = __webpack_require__(462)
+// Determine if version is less than all the versions possible in the range
+const ltr = (version, range, options) => outside(version, range, '<', options)
+module.exports = ltr
+
+
+/***/ }),
+/* 324 */,
+/* 325 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const run_1 = __webpack_require__(180);
+const tools_1 = __webpack_require__(534);
+run_1.run().catch(tools_1.handleError);
+
+
+/***/ }),
+/* 326 */
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=ObserverResult.js.map
+
+/***/ }),
+/* 327 */
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+/**
+ * Indicates whether a pattern matches a path
+ */
+var MatchKind;
+(function (MatchKind) {
+ /** Not matched */
+ MatchKind[MatchKind["None"] = 0] = "None";
+ /** Matched if the path is a directory */
+ MatchKind[MatchKind["Directory"] = 1] = "Directory";
+ /** Matched if the path is a regular file */
+ MatchKind[MatchKind["File"] = 2] = "File";
+ /** Matched */
+ MatchKind[MatchKind["All"] = 3] = "All";
+})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));
+//# sourceMappingURL=internal-match-kind.js.map
+
+/***/ }),
+/* 328 */
+/***/ (function(module, exports, __webpack_require__) {
+
+const { MAX_SAFE_COMPONENT_LENGTH } = __webpack_require__(181)
+const debug = __webpack_require__(548)
+exports = module.exports = {}
+
+// The actual regexps go on exports.re
+const re = exports.re = []
+const src = exports.src = []
+const t = exports.t = {}
+let R = 0
+
+const createToken = (name, value, isGlobal) => {
+ const index = R++
+ debug(index, value)
+ t[name] = index
+ src[index] = value
+ re[index] = new RegExp(value, isGlobal ? 'g' : undefined)
+}
+
+// The following Regular Expressions can be used for tokenizing,
+// validating, and parsing SemVer version strings.
+
+// ## Numeric Identifier
+// A single `0`, or a non-zero digit followed by zero or more digits.
+
+createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*')
+createToken('NUMERICIDENTIFIERLOOSE', '[0-9]+')
+
+// ## Non-numeric Identifier
+// Zero or more digits, followed by a letter or hyphen, and then zero or
+// more letters, digits, or hyphens.
+
+createToken('NONNUMERICIDENTIFIER', '\\d*[a-zA-Z-][a-zA-Z0-9-]*')
+
+// ## Main Version
+// Three dot-separated numeric identifiers.
+
+createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` +
+ `(${src[t.NUMERICIDENTIFIER]})\\.` +
+ `(${src[t.NUMERICIDENTIFIER]})`)
+
+createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` +
+ `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` +
+ `(${src[t.NUMERICIDENTIFIERLOOSE]})`)
+
+// ## Pre-release Version Identifier
+// A numeric identifier, or a non-numeric identifier.
+
+createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER]
+}|${src[t.NONNUMERICIDENTIFIER]})`)
+
+createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE]
+}|${src[t.NONNUMERICIDENTIFIER]})`)
+
+// ## Pre-release Version
+// Hyphen, followed by one or more dot-separated pre-release version
+// identifiers.
+
+createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER]
+}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`)
+
+createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE]
+}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`)
+
+// ## Build Metadata Identifier
+// Any combination of digits, letters, or hyphens.
+
+createToken('BUILDIDENTIFIER', '[0-9A-Za-z-]+')
+
+// ## Build Metadata
+// Plus sign, followed by one or more period-separated build metadata
+// identifiers.
+
+createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER]
+}(?:\\.${src[t.BUILDIDENTIFIER]})*))`)
+
+// ## Full Version String
+// A main version, followed optionally by a pre-release version and
+// build metadata.
+
+// Note that the only major, minor, patch, and pre-release sections of
+// the version string are capturing groups. The build metadata is not a
+// capturing group, because it should not ever be used in version
+// comparison.
+
+createToken('FULLPLAIN', `v?${src[t.MAINVERSION]
+}${src[t.PRERELEASE]}?${
+ src[t.BUILD]}?`)
+
+createToken('FULL', `^${src[t.FULLPLAIN]}$`)
+
+// like full, but allows v1.2.3 and =1.2.3, which people do sometimes.
+// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty
+// common in the npm registry.
+createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE]
+}${src[t.PRERELEASELOOSE]}?${
+ src[t.BUILD]}?`)
+
+createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`)
+
+createToken('GTLT', '((?:<|>)?=?)')
+
+// Something like "2.*" or "1.2.x".
+// Note that "x.x" is a valid xRange identifer, meaning "any version"
+// Only the first item is strictly required.
+createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`)
+createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`)
+
+createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` +
+ `(?:\\.(${src[t.XRANGEIDENTIFIER]})` +
+ `(?:\\.(${src[t.XRANGEIDENTIFIER]})` +
+ `(?:${src[t.PRERELEASE]})?${
+ src[t.BUILD]}?` +
+ `)?)?`)
+
+createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` +
+ `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` +
+ `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` +
+ `(?:${src[t.PRERELEASELOOSE]})?${
+ src[t.BUILD]}?` +
+ `)?)?`)
+
+createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`)
+createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`)
+
+// Coercion.
+// Extract anything that could conceivably be a part of a valid semver
+createToken('COERCE', `${'(^|[^\\d])' +
+ '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` +
+ `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
+ `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` +
+ `(?:$|[^\\d])`)
+createToken('COERCERTL', src[t.COERCE], true)
+
+// Tilde ranges.
+// Meaning is "reasonably at or greater than"
+createToken('LONETILDE', '(?:~>?)')
+
+createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true)
+exports.tildeTrimReplace = '$1~'
+
+createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`)
+createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`)
+
+// Caret ranges.
+// Meaning is "at least and backwards compatible with"
+createToken('LONECARET', '(?:\\^)')
+
+createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true)
+exports.caretTrimReplace = '$1^'
+
+createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`)
+createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`)
+
+// A simple gt/lt/eq thing, or just "" to indicate "any version"
+createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`)
+createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`)
+
+// An expression to strip any whitespace between the gtlt and the thing
+// it modifies, so that `> 1.2.3` ==> `>1.2.3`
+createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT]
+}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true)
+exports.comparatorTrimReplace = '$1$2$3'
+
+// Something like `1.2.3 - 1.2.4`
+// Note that these all use the loose form, because they'll be
+// checked against either the strict or loose comparator form
+// later.
+createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` +
+ `\\s+-\\s+` +
+ `(${src[t.XRANGEPLAIN]})` +
+ `\\s*$`)
+
+createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` +
+ `\\s+-\\s+` +
+ `(${src[t.XRANGEPLAINLOOSE]})` +
+ `\\s*$`)
+
+// Star ranges basically just allow anything at all.
+createToken('STAR', '(<|>)?=?\\s*\\*')
+// >=0.0.0 is like a star
+createToken('GTE0', '^\\s*>=\\s*0\.0\.0\\s*$')
+createToken('GTE0PRE', '^\\s*>=\\s*0\.0\.0-0\\s*$')
+
+
+/***/ }),
+/* 329 */,
+/* 330 */,
+/* 331 */,
+/* 332 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+/*!
+ * Copyright (c) 2015, Salesforce.com, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * 3. Neither the name of Salesforce.com nor the names of its contributors may
+ * be used to endorse or promote products derived from this software without
+ * specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+const { fromCallback } = __webpack_require__(147);
+const Store = __webpack_require__(338).Store;
+const permuteDomain = __webpack_require__(89).permuteDomain;
+const pathMatch = __webpack_require__(348).pathMatch;
+const util = __webpack_require__(669);
+
+class MemoryCookieStore extends Store {
+ constructor() {
+ super();
+ this.synchronous = true;
+ this.idx = {};
+ if (util.inspect.custom) {
+ this[util.inspect.custom] = this.inspect;
}
-};
-var ContainerGetAccountInfoHeaders = {
- serializedName: "container-getaccountinfo-headers",
- type: {
- name: "Composite",
- className: "ContainerGetAccountInfoHeaders",
- modelProperties: {
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- skuName: {
- serializedName: "x-ms-sku-name",
- type: {
- name: "Enum",
- allowedValues: [
- "Standard_LRS",
- "Standard_GRS",
- "Standard_RAGRS",
- "Standard_ZRS",
- "Premium_LRS"
- ]
- }
- },
- accountKind: {
- serializedName: "x-ms-account-kind",
- type: {
- name: "Enum",
- allowedValues: [
- "Storage",
- "BlobStorage",
- "StorageV2",
- "FileStorage",
- "BlockBlobStorage"
- ]
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
+ }
+
+ inspect() {
+ return `{ idx: ${util.inspect(this.idx, false, 2)} }`;
+ }
+
+ findCookie(domain, path, key, cb) {
+ if (!this.idx[domain]) {
+ return cb(null, undefined);
+ }
+ if (!this.idx[domain][path]) {
+ return cb(null, undefined);
+ }
+ return cb(null, this.idx[domain][path][key] || null);
+ }
+ findCookies(domain, path, allowSpecialUseDomain, cb) {
+ const results = [];
+ if (typeof allowSpecialUseDomain === "function") {
+ cb = allowSpecialUseDomain;
+ allowSpecialUseDomain = false;
+ }
+ if (!domain) {
+ return cb(null, []);
+ }
+
+ let pathMatcher;
+ if (!path) {
+ // null means "all paths"
+ pathMatcher = function matchAll(domainIndex) {
+ for (const curPath in domainIndex) {
+ const pathIndex = domainIndex[curPath];
+ for (const key in pathIndex) {
+ results.push(pathIndex[key]);
+ }
}
+ };
+ } else {
+ pathMatcher = function matchRFC(domainIndex) {
+ //NOTE: we should use path-match algorithm from S5.1.4 here
+ //(see : https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/canonical_cookie.cc#L299)
+ Object.keys(domainIndex).forEach(cookiePath => {
+ if (pathMatch(path, cookiePath)) {
+ const pathIndex = domainIndex[cookiePath];
+ for (const key in pathIndex) {
+ results.push(pathIndex[key]);
+ }
+ }
+ });
+ };
+ }
+
+ const domains = permuteDomain(domain, allowSpecialUseDomain) || [domain];
+ const idx = this.idx;
+ domains.forEach(curDomain => {
+ const domainIndex = idx[curDomain];
+ if (!domainIndex) {
+ return;
+ }
+ pathMatcher(domainIndex);
+ });
+
+ cb(null, results);
+ }
+
+ putCookie(cookie, cb) {
+ if (!this.idx[cookie.domain]) {
+ this.idx[cookie.domain] = {};
+ }
+ if (!this.idx[cookie.domain][cookie.path]) {
+ this.idx[cookie.domain][cookie.path] = {};
+ }
+ this.idx[cookie.domain][cookie.path][cookie.key] = cookie;
+ cb(null);
+ }
+ updateCookie(oldCookie, newCookie, cb) {
+ // updateCookie() may avoid updating cookies that are identical. For example,
+ // lastAccessed may not be important to some stores and an equality
+ // comparison could exclude that field.
+ this.putCookie(newCookie, cb);
+ }
+ removeCookie(domain, path, key, cb) {
+ if (
+ this.idx[domain] &&
+ this.idx[domain][path] &&
+ this.idx[domain][path][key]
+ ) {
+ delete this.idx[domain][path][key];
+ }
+ cb(null);
+ }
+ removeCookies(domain, path, cb) {
+ if (this.idx[domain]) {
+ if (path) {
+ delete this.idx[domain][path];
+ } else {
+ delete this.idx[domain];
+ }
}
+ return cb(null);
+ }
+ removeAllCookies(cb) {
+ this.idx = {};
+ return cb(null);
+ }
+ getAllCookies(cb) {
+ const cookies = [];
+ const idx = this.idx;
+
+ const domains = Object.keys(idx);
+ domains.forEach(domain => {
+ const paths = Object.keys(idx[domain]);
+ paths.forEach(path => {
+ const keys = Object.keys(idx[domain][path]);
+ keys.forEach(key => {
+ if (key !== null) {
+ cookies.push(idx[domain][path][key]);
+ }
+ });
+ });
+ });
+
+ // Sort by creationIndex so deserializing retains the creation order.
+ // When implementing your own store, this SHOULD retain the order too
+ cookies.sort((a, b) => {
+ return (a.creationIndex || 0) - (b.creationIndex || 0);
+ });
+
+ cb(null, cookies);
+ }
+}
+
+[
+ "findCookie",
+ "findCookies",
+ "putCookie",
+ "updateCookie",
+ "removeCookie",
+ "removeCookies",
+ "removeAllCookies",
+ "getAllCookies"
+].forEach(name => {
+ MemoryCookieStore[name] = fromCallback(MemoryCookieStore.prototype[name]);
+});
+
+exports.MemoryCookieStore = MemoryCookieStore;
+
+
+/***/ }),
+/* 333 */,
+/* 334 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports =
+{
+ parallel : __webpack_require__(424),
+ serial : __webpack_require__(91),
+ serialOrdered : __webpack_require__(892)
};
-var BlobDownloadHeaders = {
- serializedName: "blob-download-headers",
- type: {
- name: "Composite",
- className: "BlobDownloadHeaders",
- modelProperties: {
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- metadata: {
- serializedName: "x-ms-meta",
- type: {
- name: "Dictionary",
- value: {
- type: {
- name: "String"
- }
- }
- },
- headerCollectionPrefix: "x-ms-meta-"
- },
- objectReplicationPolicyId: {
- serializedName: "x-ms-or-policy-id",
- type: {
- name: "String"
- }
- },
- objectReplicationRules: {
- serializedName: "x-ms-or",
- type: {
- name: "Dictionary",
- value: {
- type: {
- name: "String"
- }
- }
- },
- headerCollectionPrefix: "x-ms-or-"
- },
- contentLength: {
- serializedName: "content-length",
- type: {
- name: "Number"
- }
- },
- contentType: {
- serializedName: "content-type",
- type: {
- name: "String"
- }
- },
- contentRange: {
- serializedName: "content-range",
- type: {
- name: "String"
- }
- },
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- contentMD5: {
- serializedName: "content-md5",
- type: {
- name: "ByteArray"
- }
- },
- contentEncoding: {
- serializedName: "content-encoding",
- type: {
- name: "String"
- }
- },
- cacheControl: {
- serializedName: "cache-control",
- type: {
- name: "String"
- }
- },
- contentDisposition: {
- serializedName: "content-disposition",
- type: {
- name: "String"
- }
- },
- contentLanguage: {
- serializedName: "content-language",
- type: {
- name: "String"
- }
- },
- blobSequenceNumber: {
- serializedName: "x-ms-blob-sequence-number",
- type: {
- name: "Number"
- }
- },
- blobType: {
- serializedName: "x-ms-blob-type",
- type: {
- name: "Enum",
- allowedValues: [
- "BlockBlob",
- "PageBlob",
- "AppendBlob"
- ]
- }
- },
- copyCompletedOn: {
- serializedName: "x-ms-copy-completion-time",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- copyStatusDescription: {
- serializedName: "x-ms-copy-status-description",
- type: {
- name: "String"
- }
- },
- copyId: {
- serializedName: "x-ms-copy-id",
- type: {
- name: "String"
- }
- },
- copyProgress: {
- serializedName: "x-ms-copy-progress",
- type: {
- name: "String"
- }
- },
- copySource: {
- serializedName: "x-ms-copy-source",
- type: {
- name: "String"
- }
- },
- copyStatus: {
- serializedName: "x-ms-copy-status",
- type: {
- name: "Enum",
- allowedValues: [
- "pending",
- "success",
- "aborted",
- "failed"
- ]
- }
- },
- leaseDuration: {
- serializedName: "x-ms-lease-duration",
- type: {
- name: "Enum",
- allowedValues: [
- "infinite",
- "fixed"
- ]
- }
- },
- leaseState: {
- serializedName: "x-ms-lease-state",
- type: {
- name: "Enum",
- allowedValues: [
- "available",
- "leased",
- "expired",
- "breaking",
- "broken"
- ]
- }
- },
- leaseStatus: {
- serializedName: "x-ms-lease-status",
- type: {
- name: "Enum",
- allowedValues: [
- "locked",
- "unlocked"
- ]
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- versionId: {
- serializedName: "x-ms-version-id",
- type: {
- name: "String"
- }
- },
- acceptRanges: {
- serializedName: "accept-ranges",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- blobCommittedBlockCount: {
- serializedName: "x-ms-blob-committed-block-count",
- type: {
- name: "Number"
- }
- },
- isServerEncrypted: {
- serializedName: "x-ms-server-encrypted",
- type: {
- name: "Boolean"
- }
- },
- encryptionKeySha256: {
- serializedName: "x-ms-encryption-key-sha256",
- type: {
- name: "String"
- }
- },
- encryptionScope: {
- serializedName: "x-ms-encryption-scope",
- type: {
- name: "String"
- }
- },
- blobContentMD5: {
- serializedName: "x-ms-blob-content-md5",
- type: {
- name: "ByteArray"
- }
- },
- tagCount: {
- serializedName: "x-ms-tag-count",
- type: {
- name: "Number"
- }
- },
- isSealed: {
- serializedName: "x-ms-blob-sealed",
- type: {
- name: "Boolean"
- }
- },
- contentCrc64: {
- serializedName: "x-ms-content-crc64",
- type: {
- name: "ByteArray"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
- }
+
+
+/***/ }),
+/* 335 */,
+/* 336 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+var MurmurHash3 = __webpack_require__(188)
+
+module.exports = function (uniq) {
+ if (uniq) {
+ var hash = new MurmurHash3(uniq)
+ return ('00000000' + hash.result().toString(16)).substr(-8)
+ } else {
+ return (Math.random().toString(16) + '0000000').substr(2, 8)
+ }
+}
+
+
+/***/ }),
+/* 337 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+/*!
+ * humanize-ms - index.js
+ * Copyright(c) 2014 dead_horse
+ * MIT Licensed
+ */
+
+
+
+/**
+ * Module dependencies.
+ */
+
+var util = __webpack_require__(669);
+var ms = __webpack_require__(527);
+
+module.exports = function (t) {
+ if (typeof t === 'number') return t;
+ var r = ms(t);
+ if (r === undefined) {
+ var err = new Error(util.format('humanize-ms(%j) result undefined', t));
+ console.warn(err.stack);
+ }
+ return r;
};
-var BlobGetPropertiesHeaders = {
- serializedName: "blob-getproperties-headers",
- type: {
- name: "Composite",
- className: "BlobGetPropertiesHeaders",
- modelProperties: {
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- createdOn: {
- serializedName: "x-ms-creation-time",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- metadata: {
- serializedName: "x-ms-meta",
- type: {
- name: "Dictionary",
- value: {
- type: {
- name: "String"
- }
- }
- },
- headerCollectionPrefix: "x-ms-meta-"
- },
- objectReplicationPolicyId: {
- serializedName: "x-ms-or-policy-id",
- type: {
- name: "String"
- }
- },
- objectReplicationRules: {
- serializedName: "x-ms-or",
- type: {
- name: "Dictionary",
- value: {
- type: {
- name: "String"
- }
- }
- },
- headerCollectionPrefix: "x-ms-or-"
- },
- blobType: {
- serializedName: "x-ms-blob-type",
- type: {
- name: "Enum",
- allowedValues: [
- "BlockBlob",
- "PageBlob",
- "AppendBlob"
- ]
- }
- },
- copyCompletedOn: {
- serializedName: "x-ms-copy-completion-time",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- copyStatusDescription: {
- serializedName: "x-ms-copy-status-description",
- type: {
- name: "String"
- }
- },
- copyId: {
- serializedName: "x-ms-copy-id",
- type: {
- name: "String"
- }
- },
- copyProgress: {
- serializedName: "x-ms-copy-progress",
- type: {
- name: "String"
- }
- },
- copySource: {
- serializedName: "x-ms-copy-source",
- type: {
- name: "String"
- }
- },
- copyStatus: {
- serializedName: "x-ms-copy-status",
- type: {
- name: "Enum",
- allowedValues: [
- "pending",
- "success",
- "aborted",
- "failed"
- ]
- }
- },
- isIncrementalCopy: {
- serializedName: "x-ms-incremental-copy",
- type: {
- name: "Boolean"
- }
- },
- destinationSnapshot: {
- serializedName: "x-ms-copy-destination-snapshot",
- type: {
- name: "String"
- }
- },
- leaseDuration: {
- serializedName: "x-ms-lease-duration",
- type: {
- name: "Enum",
- allowedValues: [
- "infinite",
- "fixed"
- ]
- }
- },
- leaseState: {
- serializedName: "x-ms-lease-state",
- type: {
- name: "Enum",
- allowedValues: [
- "available",
- "leased",
- "expired",
- "breaking",
- "broken"
- ]
- }
- },
- leaseStatus: {
- serializedName: "x-ms-lease-status",
- type: {
- name: "Enum",
- allowedValues: [
- "locked",
- "unlocked"
- ]
- }
- },
- contentLength: {
- serializedName: "content-length",
- type: {
- name: "Number"
- }
- },
- contentType: {
- serializedName: "content-type",
- type: {
- name: "String"
- }
- },
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- contentMD5: {
- serializedName: "content-md5",
- type: {
- name: "ByteArray"
- }
- },
- contentEncoding: {
- serializedName: "content-encoding",
- type: {
- name: "String"
- }
- },
- contentDisposition: {
- serializedName: "content-disposition",
- type: {
- name: "String"
- }
- },
- contentLanguage: {
- serializedName: "content-language",
- type: {
- name: "String"
- }
- },
- cacheControl: {
- serializedName: "cache-control",
- type: {
- name: "String"
- }
- },
- blobSequenceNumber: {
- serializedName: "x-ms-blob-sequence-number",
- type: {
- name: "Number"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- acceptRanges: {
- serializedName: "accept-ranges",
- type: {
- name: "String"
- }
- },
- blobCommittedBlockCount: {
- serializedName: "x-ms-blob-committed-block-count",
- type: {
- name: "Number"
- }
- },
- isServerEncrypted: {
- serializedName: "x-ms-server-encrypted",
- type: {
- name: "Boolean"
- }
- },
- encryptionKeySha256: {
- serializedName: "x-ms-encryption-key-sha256",
- type: {
- name: "String"
- }
- },
- encryptionScope: {
- serializedName: "x-ms-encryption-scope",
- type: {
- name: "String"
- }
- },
- accessTier: {
- serializedName: "x-ms-access-tier",
- type: {
- name: "String"
- }
- },
- accessTierInferred: {
- serializedName: "x-ms-access-tier-inferred",
- type: {
- name: "Boolean"
- }
- },
- archiveStatus: {
- serializedName: "x-ms-archive-status",
- type: {
- name: "String"
- }
- },
- accessTierChangedOn: {
- serializedName: "x-ms-access-tier-change-time",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- versionId: {
- serializedName: "x-ms-version-id",
- type: {
- name: "String"
- }
- },
- isCurrentVersion: {
- serializedName: "x-ms-is-current-version",
- type: {
- name: "Boolean"
- }
- },
- tagCount: {
- serializedName: "x-ms-tag-count",
- type: {
- name: "Number"
- }
- },
- expiresOn: {
- serializedName: "x-ms-expiry-time",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- isSealed: {
- serializedName: "x-ms-blob-sealed",
- type: {
- name: "Boolean"
- }
- },
- rehydratePriority: {
- serializedName: "x-ms-rehydrate-priority",
- type: {
- name: "String"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
- }
-};
-var BlobDeleteHeaders = {
- serializedName: "blob-delete-headers",
- type: {
- name: "Composite",
- className: "BlobDeleteHeaders",
- modelProperties: {
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
- }
+
+
+/***/ }),
+/* 338 */
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+/*!
+ * Copyright (c) 2015, Salesforce.com, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * 3. Neither the name of Salesforce.com nor the names of its contributors may
+ * be used to endorse or promote products derived from this software without
+ * specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+/*jshint unused:false */
+
+class Store {
+ constructor() {
+ this.synchronous = false;
+ }
+
+ findCookie(domain, path, key, cb) {
+ throw new Error("findCookie is not implemented");
+ }
+
+ findCookies(domain, path, allowSpecialUseDomain, cb) {
+ throw new Error("findCookies is not implemented");
+ }
+
+ putCookie(cookie, cb) {
+ throw new Error("putCookie is not implemented");
+ }
+
+ updateCookie(oldCookie, newCookie, cb) {
+ // recommended default implementation:
+ // return this.putCookie(newCookie, cb);
+ throw new Error("updateCookie is not implemented");
+ }
+
+ removeCookie(domain, path, key, cb) {
+ throw new Error("removeCookie is not implemented");
+ }
+
+ removeCookies(domain, path, cb) {
+ throw new Error("removeCookies is not implemented");
+ }
+
+ removeAllCookies(cb) {
+ throw new Error("removeAllCookies is not implemented");
+ }
+
+ getAllCookies(cb) {
+ throw new Error(
+ "getAllCookies is not implemented (therefore jar cannot be serialized)"
+ );
+ }
+}
+
+exports.Store = Store;
+
+
+/***/ }),
+/* 339 */,
+/* 340 */
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+/*
+ * Copyright The OpenTelemetry Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SamplingDecision = void 0;
+/**
+ * A sampling decision that determines how a {@link Span} will be recorded
+ * and collected.
+ */
+var SamplingDecision;
+(function (SamplingDecision) {
+ /**
+ * `Span.isRecording() === false`, span will not be recorded and all events
+ * and attributes will be dropped.
+ */
+ SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD";
+ /**
+ * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags}
+ * MUST NOT be set.
+ */
+ SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD";
+ /**
+ * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags}
+ * MUST be set.
+ */
+ SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED";
+})(SamplingDecision = exports.SamplingDecision || (exports.SamplingDecision = {}));
+//# sourceMappingURL=SamplingResult.js.map
+
+/***/ }),
+/* 341 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const stripAnsi = __webpack_require__(569);
+const isFullwidthCodePoint = __webpack_require__(97);
+
+module.exports = str => {
+ if (typeof str !== 'string' || str.length === 0) {
+ return 0;
+ }
+
+ str = stripAnsi(str);
+
+ let width = 0;
+
+ for (let i = 0; i < str.length; i++) {
+ const code = str.codePointAt(i);
+
+ // Ignore control characters
+ if (code <= 0x1F || (code >= 0x7F && code <= 0x9F)) {
+ continue;
+ }
+
+ // Ignore combining characters
+ if (code >= 0x300 && code <= 0x36F) {
+ continue;
+ }
+
+ // Surrogates
+ if (code > 0xFFFF) {
+ i++;
+ }
+
+ width += isFullwidthCodePoint(code) ? 2 : 1;
+ }
+
+ return width;
};
-var BlobSetAccessControlHeaders = {
- serializedName: "blob-setaccesscontrol-headers",
- type: {
- name: "Composite",
- className: "BlobSetAccessControlHeaders",
- modelProperties: {
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- }
- }
- }
-};
-var BlobGetAccessControlHeaders = {
- serializedName: "blob-getaccesscontrol-headers",
- type: {
- name: "Composite",
- className: "BlobGetAccessControlHeaders",
- modelProperties: {
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- xMsOwner: {
- serializedName: "x-ms-owner",
- type: {
- name: "String"
- }
- },
- xMsGroup: {
- serializedName: "x-ms-group",
- type: {
- name: "String"
- }
- },
- xMsPermissions: {
- serializedName: "x-ms-permissions",
- type: {
- name: "String"
- }
- },
- xMsAcl: {
- serializedName: "x-ms-acl",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- }
+
+
+/***/ }),
+/* 342 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = rimraf
+rimraf.sync = rimrafSync
+
+var assert = __webpack_require__(357)
+var path = __webpack_require__(622)
+var fs = __webpack_require__(747)
+var glob = undefined
+try {
+ glob = __webpack_require__(402)
+} catch (_err) {
+ // treat glob as optional.
+}
+var _0666 = parseInt('666', 8)
+
+var defaultGlobOpts = {
+ nosort: true,
+ silent: true
+}
+
+// for EMFILE handling
+var timeout = 0
+
+var isWindows = (process.platform === "win32")
+
+function defaults (options) {
+ var methods = [
+ 'unlink',
+ 'chmod',
+ 'stat',
+ 'lstat',
+ 'rmdir',
+ 'readdir'
+ ]
+ methods.forEach(function(m) {
+ options[m] = options[m] || fs[m]
+ m = m + 'Sync'
+ options[m] = options[m] || fs[m]
+ })
+
+ options.maxBusyTries = options.maxBusyTries || 3
+ options.emfileWait = options.emfileWait || 1000
+ if (options.glob === false) {
+ options.disableGlob = true
+ }
+ if (options.disableGlob !== true && glob === undefined) {
+ throw Error('glob dependency not found, set `options.disableGlob = true` if intentional')
+ }
+ options.disableGlob = options.disableGlob || false
+ options.glob = options.glob || defaultGlobOpts
+}
+
+function rimraf (p, options, cb) {
+ if (typeof options === 'function') {
+ cb = options
+ options = {}
+ }
+
+ assert(p, 'rimraf: missing path')
+ assert.equal(typeof p, 'string', 'rimraf: path should be a string')
+ assert.equal(typeof cb, 'function', 'rimraf: callback function required')
+ assert(options, 'rimraf: invalid options argument provided')
+ assert.equal(typeof options, 'object', 'rimraf: options should be object')
+
+ defaults(options)
+
+ var busyTries = 0
+ var errState = null
+ var n = 0
+
+ if (options.disableGlob || !glob.hasMagic(p))
+ return afterGlob(null, [p])
+
+ options.lstat(p, function (er, stat) {
+ if (!er)
+ return afterGlob(null, [p])
+
+ glob(p, options.glob, afterGlob)
+ })
+
+ function next (er) {
+ errState = errState || er
+ if (--n === 0)
+ cb(errState)
+ }
+
+ function afterGlob (er, results) {
+ if (er)
+ return cb(er)
+
+ n = results.length
+ if (n === 0)
+ return cb()
+
+ results.forEach(function (p) {
+ rimraf_(p, options, function CB (er) {
+ if (er) {
+ if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") &&
+ busyTries < options.maxBusyTries) {
+ busyTries ++
+ var time = busyTries * 100
+ // try again, with the same exact callback as this one.
+ return setTimeout(function () {
+ rimraf_(p, options, CB)
+ }, time)
+ }
+
+ // this one won't happen if graceful-fs is used.
+ if (er.code === "EMFILE" && timeout < options.emfileWait) {
+ return setTimeout(function () {
+ rimraf_(p, options, CB)
+ }, timeout ++)
+ }
+
+ // already gone
+ if (er.code === "ENOENT") er = null
}
+
+ timeout = 0
+ next(er)
+ })
+ })
+ }
+}
+
+// Two possible strategies.
+// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR
+// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR
+//
+// Both result in an extra syscall when you guess wrong. However, there
+// are likely far more normal files in the world than directories. This
+// is based on the assumption that a the average number of files per
+// directory is >= 1.
+//
+// If anyone ever complains about this, then I guess the strategy could
+// be made configurable somehow. But until then, YAGNI.
+function rimraf_ (p, options, cb) {
+ assert(p)
+ assert(options)
+ assert(typeof cb === 'function')
+
+ // sunos lets the root user unlink directories, which is... weird.
+ // so we have to lstat here and make sure it's not a dir.
+ options.lstat(p, function (er, st) {
+ if (er && er.code === "ENOENT")
+ return cb(null)
+
+ // Windows can EPERM on stat. Life is suffering.
+ if (er && er.code === "EPERM" && isWindows)
+ fixWinEPERM(p, options, er, cb)
+
+ if (st && st.isDirectory())
+ return rmdir(p, options, er, cb)
+
+ options.unlink(p, function (er) {
+ if (er) {
+ if (er.code === "ENOENT")
+ return cb(null)
+ if (er.code === "EPERM")
+ return (isWindows)
+ ? fixWinEPERM(p, options, er, cb)
+ : rmdir(p, options, er, cb)
+ if (er.code === "EISDIR")
+ return rmdir(p, options, er, cb)
+ }
+ return cb(er)
+ })
+ })
+}
+
+function fixWinEPERM (p, options, er, cb) {
+ assert(p)
+ assert(options)
+ assert(typeof cb === 'function')
+ if (er)
+ assert(er instanceof Error)
+
+ options.chmod(p, _0666, function (er2) {
+ if (er2)
+ cb(er2.code === "ENOENT" ? null : er)
+ else
+ options.stat(p, function(er3, stats) {
+ if (er3)
+ cb(er3.code === "ENOENT" ? null : er)
+ else if (stats.isDirectory())
+ rmdir(p, options, er, cb)
+ else
+ options.unlink(p, cb)
+ })
+ })
+}
+
+function fixWinEPERMSync (p, options, er) {
+ assert(p)
+ assert(options)
+ if (er)
+ assert(er instanceof Error)
+
+ try {
+ options.chmodSync(p, _0666)
+ } catch (er2) {
+ if (er2.code === "ENOENT")
+ return
+ else
+ throw er
+ }
+
+ try {
+ var stats = options.statSync(p)
+ } catch (er3) {
+ if (er3.code === "ENOENT")
+ return
+ else
+ throw er
+ }
+
+ if (stats.isDirectory())
+ rmdirSync(p, options, er)
+ else
+ options.unlinkSync(p)
+}
+
+function rmdir (p, options, originalEr, cb) {
+ assert(p)
+ assert(options)
+ if (originalEr)
+ assert(originalEr instanceof Error)
+ assert(typeof cb === 'function')
+
+ // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS)
+ // if we guessed wrong, and it's not a directory, then
+ // raise the original error.
+ options.rmdir(p, function (er) {
+ if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM"))
+ rmkids(p, options, cb)
+ else if (er && er.code === "ENOTDIR")
+ cb(originalEr)
+ else
+ cb(er)
+ })
+}
+
+function rmkids(p, options, cb) {
+ assert(p)
+ assert(options)
+ assert(typeof cb === 'function')
+
+ options.readdir(p, function (er, files) {
+ if (er)
+ return cb(er)
+ var n = files.length
+ if (n === 0)
+ return options.rmdir(p, cb)
+ var errState
+ files.forEach(function (f) {
+ rimraf(path.join(p, f), options, function (er) {
+ if (errState)
+ return
+ if (er)
+ return cb(errState = er)
+ if (--n === 0)
+ options.rmdir(p, cb)
+ })
+ })
+ })
+}
+
+// this looks simpler, and is strictly *faster*, but will
+// tie up the JavaScript thread and fail on excessively
+// deep directory trees.
+function rimrafSync (p, options) {
+ options = options || {}
+ defaults(options)
+
+ assert(p, 'rimraf: missing path')
+ assert.equal(typeof p, 'string', 'rimraf: path should be a string')
+ assert(options, 'rimraf: missing options')
+ assert.equal(typeof options, 'object', 'rimraf: options should be object')
+
+ var results
+
+ if (options.disableGlob || !glob.hasMagic(p)) {
+ results = [p]
+ } else {
+ try {
+ options.lstatSync(p)
+ results = [p]
+ } catch (er) {
+ results = glob.sync(p, options.glob)
}
-};
-var BlobRenameHeaders = {
- serializedName: "blob-rename-headers",
- type: {
- name: "Composite",
- className: "BlobRenameHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- contentLength: {
- serializedName: "content-length",
- type: {
- name: "Number"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- }
- }
+ }
+
+ if (!results.length)
+ return
+
+ for (var i = 0; i < results.length; i++) {
+ var p = results[i]
+
+ try {
+ var st = options.lstatSync(p)
+ } catch (er) {
+ if (er.code === "ENOENT")
+ return
+
+ // Windows can EPERM on stat. Life is suffering.
+ if (er.code === "EPERM" && isWindows)
+ fixWinEPERMSync(p, options, er)
}
-};
-var PageBlobCreateHeaders = {
- serializedName: "pageblob-create-headers",
- type: {
- name: "Composite",
- className: "PageBlobCreateHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- contentMD5: {
- serializedName: "content-md5",
- type: {
- name: "ByteArray"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- versionId: {
- serializedName: "x-ms-version-id",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- isServerEncrypted: {
- serializedName: "x-ms-request-server-encrypted",
- type: {
- name: "Boolean"
- }
- },
- encryptionKeySha256: {
- serializedName: "x-ms-encryption-key-sha256",
- type: {
- name: "String"
- }
- },
- encryptionScope: {
- serializedName: "x-ms-encryption-scope",
- type: {
- name: "String"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
+
+ try {
+ // sunos lets the root user unlink directories, which is... weird.
+ if (st && st.isDirectory())
+ rmdirSync(p, options, null)
+ else
+ options.unlinkSync(p)
+ } catch (er) {
+ if (er.code === "ENOENT")
+ return
+ if (er.code === "EPERM")
+ return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er)
+ if (er.code !== "EISDIR")
+ throw er
+
+ rmdirSync(p, options, er)
}
-};
-var AppendBlobCreateHeaders = {
- serializedName: "appendblob-create-headers",
- type: {
- name: "Composite",
- className: "AppendBlobCreateHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- contentMD5: {
- serializedName: "content-md5",
- type: {
- name: "ByteArray"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- versionId: {
- serializedName: "x-ms-version-id",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- isServerEncrypted: {
- serializedName: "x-ms-request-server-encrypted",
- type: {
- name: "Boolean"
- }
- },
- encryptionKeySha256: {
- serializedName: "x-ms-encryption-key-sha256",
- type: {
- name: "String"
- }
- },
- encryptionScope: {
- serializedName: "x-ms-encryption-scope",
- type: {
- name: "String"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
+ }
+}
+
+function rmdirSync (p, options, originalEr) {
+ assert(p)
+ assert(options)
+ if (originalEr)
+ assert(originalEr instanceof Error)
+
+ try {
+ options.rmdirSync(p)
+ } catch (er) {
+ if (er.code === "ENOENT")
+ return
+ if (er.code === "ENOTDIR")
+ throw originalEr
+ if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")
+ rmkidsSync(p, options)
+ }
+}
+
+function rmkidsSync (p, options) {
+ assert(p)
+ assert(options)
+ options.readdirSync(p).forEach(function (f) {
+ rimrafSync(path.join(p, f), options)
+ })
+
+ // We only end up here once we got ENOTEMPTY at least once, and
+ // at this point, we are guaranteed to have removed all the kids.
+ // So, we know that it won't be ENOENT or ENOTDIR or anything else.
+ // try really hard to delete stuff on windows, because it has a
+ // PROFOUNDLY annoying habit of not closing handles promptly when
+ // files are deleted, resulting in spurious ENOTEMPTY errors.
+ var retries = isWindows ? 100 : 1
+ var i = 0
+ do {
+ var threw = true
+ try {
+ var ret = options.rmdirSync(p, options)
+ threw = false
+ return ret
+ } finally {
+ if (++i < retries && threw)
+ continue
}
-};
-var BlockBlobUploadHeaders = {
- serializedName: "blockblob-upload-headers",
- type: {
- name: "Composite",
- className: "BlockBlobUploadHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- contentMD5: {
- serializedName: "content-md5",
- type: {
- name: "ByteArray"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- versionId: {
- serializedName: "x-ms-version-id",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- isServerEncrypted: {
- serializedName: "x-ms-request-server-encrypted",
- type: {
- name: "Boolean"
- }
- },
- encryptionKeySha256: {
- serializedName: "x-ms-encryption-key-sha256",
- type: {
- name: "String"
- }
- },
- encryptionScope: {
- serializedName: "x-ms-encryption-scope",
- type: {
- name: "String"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
+ } while (true)
+}
+
+
+/***/ }),
+/* 343 */
+/***/ (function(module) {
+
+module.exports = require("timers");
+
+/***/ }),
+/* 344 */,
+/* 345 */,
+/* 346 */,
+/* 347 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+// Generated by CoffeeScript 1.12.7
+(function() {
+ var XMLStringWriter, XMLWriterBase,
+ extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
+ hasProp = {}.hasOwnProperty;
+
+ XMLWriterBase = __webpack_require__(423);
+
+ module.exports = XMLStringWriter = (function(superClass) {
+ extend(XMLStringWriter, superClass);
+
+ function XMLStringWriter(options) {
+ XMLStringWriter.__super__.constructor.call(this, options);
+ }
+
+ XMLStringWriter.prototype.document = function(doc, options) {
+ var child, i, len, r, ref;
+ options = this.filterOptions(options);
+ r = '';
+ ref = doc.children;
+ for (i = 0, len = ref.length; i < len; i++) {
+ child = ref[i];
+ r += this.writeChildNode(child, options, 0);
+ }
+ if (options.pretty && r.slice(-options.newline.length) === options.newline) {
+ r = r.slice(0, -options.newline.length);
+ }
+ return r;
+ };
+
+ return XMLStringWriter;
+
+ })(XMLWriterBase);
+
+}).call(this);
+
+
+/***/ }),
+/* 348 */
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+/*!
+ * Copyright (c) 2015, Salesforce.com, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * 3. Neither the name of Salesforce.com nor the names of its contributors may
+ * be used to endorse or promote products derived from this software without
+ * specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+/*
+ * "A request-path path-matches a given cookie-path if at least one of the
+ * following conditions holds:"
+ */
+function pathMatch(reqPath, cookiePath) {
+ // "o The cookie-path and the request-path are identical."
+ if (cookiePath === reqPath) {
+ return true;
+ }
+
+ const idx = reqPath.indexOf(cookiePath);
+ if (idx === 0) {
+ // "o The cookie-path is a prefix of the request-path, and the last
+ // character of the cookie-path is %x2F ("/")."
+ if (cookiePath.substr(-1) === "/") {
+ return true;
+ }
+
+ // " o The cookie-path is a prefix of the request-path, and the first
+ // character of the request-path that is not included in the cookie- path
+ // is a %x2F ("/") character."
+ if (reqPath.substr(cookiePath.length, 1) === "/") {
+ return true;
+ }
+ }
+
+ return false;
+}
+
+exports.pathMatch = pathMatch;
+
+
+/***/ }),
+/* 349 */,
+/* 350 */
+/***/ (function(__unusedmodule, exports) {
+
+// Generated by CoffeeScript 1.12.7
+(function() {
+ "use strict";
+ var prefixMatch;
+
+ prefixMatch = new RegExp(/(?!xmlns)^.*:/);
+
+ exports.normalize = function(str) {
+ return str.toLowerCase();
+ };
+
+ exports.firstCharLowerCase = function(str) {
+ return str.charAt(0).toLowerCase() + str.slice(1);
+ };
+
+ exports.stripPrefix = function(str) {
+ return str.replace(prefixMatch, '');
+ };
+
+ exports.parseNumbers = function(str) {
+ if (!isNaN(str)) {
+ str = str % 1 === 0 ? parseInt(str, 10) : parseFloat(str);
+ }
+ return str;
+ };
+
+ exports.parseBooleans = function(str) {
+ if (/^(?:true|false)$/i.test(str)) {
+ str = str.toLowerCase() === 'true';
+ }
+ return str;
+ };
+
+}).call(this);
+
+
+/***/ }),
+/* 351 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+var es5 = __webpack_require__(883);
+var Objectfreeze = es5.freeze;
+var util = __webpack_require__(248);
+var inherits = util.inherits;
+var notEnumerableProp = util.notEnumerableProp;
+
+function subError(nameProperty, defaultMessage) {
+ function SubError(message) {
+ if (!(this instanceof SubError)) return new SubError(message);
+ notEnumerableProp(this, "message",
+ typeof message === "string" ? message : defaultMessage);
+ notEnumerableProp(this, "name", nameProperty);
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ } else {
+ Error.call(this);
}
}
-};
-var BlobUndeleteHeaders = {
- serializedName: "blob-undelete-headers",
- type: {
- name: "Composite",
- className: "BlobUndeleteHeaders",
- modelProperties: {
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
+ inherits(SubError, Error);
+ return SubError;
+}
+
+var _TypeError, _RangeError;
+var Warning = subError("Warning", "warning");
+var CancellationError = subError("CancellationError", "cancellation error");
+var TimeoutError = subError("TimeoutError", "timeout error");
+var AggregateError = subError("AggregateError", "aggregate error");
+try {
+ _TypeError = TypeError;
+ _RangeError = RangeError;
+} catch(e) {
+ _TypeError = subError("TypeError", "type error");
+ _RangeError = subError("RangeError", "range error");
+}
+
+var methods = ("join pop push shift unshift slice filter forEach some " +
+ "every map indexOf lastIndexOf reduce reduceRight sort reverse").split(" ");
+
+for (var i = 0; i < methods.length; ++i) {
+ if (typeof Array.prototype[methods[i]] === "function") {
+ AggregateError.prototype[methods[i]] = Array.prototype[methods[i]];
+ }
+}
+
+es5.defineProperty(AggregateError.prototype, "length", {
+ value: 0,
+ configurable: false,
+ writable: true,
+ enumerable: true
+});
+AggregateError.prototype["isOperational"] = true;
+var level = 0;
+AggregateError.prototype.toString = function() {
+ var indent = Array(level * 4 + 1).join(" ");
+ var ret = "\n" + indent + "AggregateError of:" + "\n";
+ level++;
+ indent = Array(level * 4 + 1).join(" ");
+ for (var i = 0; i < this.length; ++i) {
+ var str = this[i] === this ? "[Circular AggregateError]" : this[i] + "";
+ var lines = str.split("\n");
+ for (var j = 0; j < lines.length; ++j) {
+ lines[j] = indent + lines[j];
}
+ str = lines.join("\n");
+ ret += str + "\n";
}
+ level--;
+ return ret;
};
-var BlobSetExpiryHeaders = {
- serializedName: "blob-setexpiry-headers",
- type: {
- name: "Composite",
- className: "BlobSetExpiryHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
+
+function OperationalError(message) {
+ if (!(this instanceof OperationalError))
+ return new OperationalError(message);
+ notEnumerableProp(this, "name", "OperationalError");
+ notEnumerableProp(this, "message", message);
+ this.cause = message;
+ this["isOperational"] = true;
+
+ if (message instanceof Error) {
+ notEnumerableProp(this, "message", message.message);
+ notEnumerableProp(this, "stack", message.stack);
+ } else if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
}
+
+}
+inherits(OperationalError, Error);
+
+var errorTypes = Error["__BluebirdErrorTypes__"];
+if (!errorTypes) {
+ errorTypes = Objectfreeze({
+ CancellationError: CancellationError,
+ TimeoutError: TimeoutError,
+ OperationalError: OperationalError,
+ RejectionError: OperationalError,
+ AggregateError: AggregateError
+ });
+ es5.defineProperty(Error, "__BluebirdErrorTypes__", {
+ value: errorTypes,
+ writable: false,
+ enumerable: false,
+ configurable: false
+ });
+}
+
+module.exports = {
+ Error: Error,
+ TypeError: _TypeError,
+ RangeError: _RangeError,
+ CancellationError: errorTypes.CancellationError,
+ OperationalError: errorTypes.OperationalError,
+ TimeoutError: errorTypes.TimeoutError,
+ AggregateError: errorTypes.AggregateError,
+ Warning: Warning
};
-var BlobSetHTTPHeadersHeaders = {
- serializedName: "blob-sethttpheaders-headers",
- type: {
- name: "Composite",
- className: "BlobSetHTTPHeadersHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- blobSequenceNumber: {
- serializedName: "x-ms-blob-sequence-number",
- type: {
- name: "Number"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
+
+
+/***/ }),
+/* 352 */,
+/* 353 */
+/***/ (function(module) {
+
+"use strict";
+
+
+function createError(msg, code, props) {
+ var err = msg instanceof Error ? msg : new Error(msg);
+ var key;
+
+ if (typeof code === 'object') {
+ props = code;
+ } else if (code != null) {
+ err.code = code;
+ }
+
+ if (props) {
+ for (key in props) {
+ err[key] = props[key];
}
}
-};
-var BlobSetMetadataHeaders = {
- serializedName: "blob-setmetadata-headers",
- type: {
- name: "Composite",
- className: "BlobSetMetadataHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
+
+ return err;
+}
+
+module.exports = createError;
+
+
+/***/ }),
+/* 354 */
+/***/ (function(module) {
+
+"use strict";
+
+
+const LEVELS = [
+ 'notice',
+ 'error',
+ 'warn',
+ 'info',
+ 'verbose',
+ 'http',
+ 'silly',
+ 'pause',
+ 'resume'
+]
+
+const logger = {}
+for (const level of LEVELS) {
+ logger[level] = log(level)
+}
+module.exports = logger
+
+function log (level) {
+ return (category, ...args) => process.emit('log', level, category, ...args)
+}
+
+
+/***/ }),
+/* 355 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = {
+ publish: __webpack_require__(395),
+ unpublish: __webpack_require__(368)
+}
+
+
+/***/ }),
+/* 356 */
+/***/ (function(module) {
+
+"use strict";
+
+// this exists so we can replace it during testing
+module.exports = process
+
+
+/***/ }),
+/* 357 */
+/***/ (function(module) {
+
+module.exports = require("assert");
+
+/***/ }),
+/* 358 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+// A module for chowning things we just created, to preserve
+// ownership of new links and directories.
+
+const chownr = __webpack_require__(941)
+
+const selfOwner = {
+ uid: process.getuid && process.getuid(),
+ gid: process.getgid && process.getgid()
+}
+
+module.exports = (path, uid, gid, cb) => {
+ if (selfOwner.uid !== 0 ||
+ uid === undefined || gid === undefined ||
+ (selfOwner.uid === uid && selfOwner.gid === gid)) {
+ // don't need to, or can't chown anyway, so just leave it.
+ // this also handles platforms where process.getuid is undefined
+ return cb()
+ }
+ chownr(path, uid, gid, cb)
+}
+
+module.exports.selfOwner = selfOwner
+
+
+/***/ }),
+/* 359 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const events_1 = __webpack_require__(614);
+const net = __webpack_require__(631);
+const ip = __webpack_require__(769);
+const smart_buffer_1 = __webpack_require__(118);
+const constants_1 = __webpack_require__(583);
+const helpers_1 = __webpack_require__(372);
+const receivebuffer_1 = __webpack_require__(806);
+const util_1 = __webpack_require__(526);
+class SocksClient extends events_1.EventEmitter {
+ constructor(options) {
+ super();
+ this._options = Object.assign({}, options);
+ // Validate SocksClientOptions
+ helpers_1.validateSocksClientOptions(options);
+ // Default state
+ this.state = constants_1.SocksClientState.Created;
+ }
+ /**
+ * Creates a new SOCKS connection.
+ *
+ * Note: Supports callbacks and promises. Only supports the connect command.
+ * @param options { SocksClientOptions } Options.
+ * @param callback { Function } An optional callback function.
+ * @returns { Promise }
+ */
+ static createConnection(options, callback) {
+ // Validate SocksClientOptions
+ helpers_1.validateSocksClientOptions(options, ['connect']);
+ return new Promise((resolve, reject) => {
+ const client = new SocksClient(options);
+ client.connect(options.existing_socket);
+ client.once('established', (info) => {
+ client.removeAllListeners();
+ if (typeof callback === 'function') {
+ callback(null, info);
+ resolve(); // Resolves pending promise (prevents memory leaks).
}
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
+ else {
+ resolve(info);
}
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
+ });
+ // Error occurred, failed to establish connection.
+ client.once('error', (err) => {
+ client.removeAllListeners();
+ if (typeof callback === 'function') {
+ callback(err);
+ resolve(); // Resolves pending promise (prevents memory leaks).
}
- },
- versionId: {
- serializedName: "x-ms-version-id",
- type: {
- name: "String"
+ else {
+ reject(err);
}
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
+ });
+ });
+ }
+ /**
+ * Creates a new SOCKS connection chain to a destination host through 2 or more SOCKS proxies.
+ *
+ * Note: Supports callbacks and promises. Only supports the connect method.
+ * Note: Implemented via createConnection() factory function.
+ * @param options { SocksClientChainOptions } Options
+ * @param callback { Function } An optional callback function.
+ * @returns { Promise }
+ */
+ static createConnectionChain(options, callback) {
+ // Validate SocksClientChainOptions
+ helpers_1.validateSocksClientChainOptions(options);
+ // Shuffle proxies
+ if (options.randomizeChain) {
+ util_1.shuffleArray(options.proxies);
+ }
+ return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
+ let sock;
+ try {
+ for (let i = 0; i < options.proxies.length; i++) {
+ const nextProxy = options.proxies[i];
+ // If we've reached the last proxy in the chain, the destination is the actual destination, otherwise it's the next proxy.
+ const nextDestination = i === options.proxies.length - 1
+ ? options.destination
+ : {
+ host: options.proxies[i + 1].ipaddress,
+ port: options.proxies[i + 1].port
+ };
+ // Creates the next connection in the chain.
+ const result = yield SocksClient.createConnection({
+ command: 'connect',
+ proxy: nextProxy,
+ destination: nextDestination
+ // Initial connection ignores this as sock is undefined. Subsequent connections re-use the first proxy socket to form a chain.
+ });
+ // If sock is undefined, assign it here.
+ if (!sock) {
+ sock = result.socket;
+ }
}
- },
- isServerEncrypted: {
- serializedName: "x-ms-request-server-encrypted",
- type: {
- name: "Boolean"
+ if (typeof callback === 'function') {
+ callback(null, { socket: sock });
+ resolve(); // Resolves pending promise (prevents memory leaks).
}
- },
- encryptionKeySha256: {
- serializedName: "x-ms-encryption-key-sha256",
- type: {
- name: "String"
+ else {
+ resolve({ socket: sock });
}
- },
- encryptionScope: {
- serializedName: "x-ms-encryption-scope",
- type: {
- name: "String"
+ }
+ catch (err) {
+ if (typeof callback === 'function') {
+ callback(err);
+ resolve(); // Resolves pending promise (prevents memory leaks).
}
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
+ else {
+ reject(err);
}
}
+ }));
+ }
+ /**
+ * Creates a SOCKS UDP Frame.
+ * @param options
+ */
+ static createUDPFrame(options) {
+ const buff = new smart_buffer_1.SmartBuffer();
+ buff.writeUInt16BE(0);
+ buff.writeUInt8(options.frameNumber || 0);
+ // IPv4/IPv6/Hostname
+ if (net.isIPv4(options.remoteHost.host)) {
+ buff.writeUInt8(constants_1.Socks5HostType.IPv4);
+ buff.writeUInt32BE(ip.toLong(options.remoteHost.host));
+ }
+ else if (net.isIPv6(options.remoteHost.host)) {
+ buff.writeUInt8(constants_1.Socks5HostType.IPv6);
+ buff.writeBuffer(ip.toBuffer(options.remoteHost.host));
+ }
+ else {
+ buff.writeUInt8(constants_1.Socks5HostType.Hostname);
+ buff.writeUInt8(Buffer.byteLength(options.remoteHost.host));
+ buff.writeString(options.remoteHost.host);
}
+ // Port
+ buff.writeUInt16BE(options.remoteHost.port);
+ // Data
+ buff.writeBuffer(options.data);
+ return buff.toBuffer();
}
-};
-var BlobAcquireLeaseHeaders = {
- serializedName: "blob-acquirelease-headers",
- type: {
- name: "Composite",
- className: "BlobAcquireLeaseHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- leaseId: {
- serializedName: "x-ms-lease-id",
- type: {
- name: "String"
- }
+ /**
+ * Parses a SOCKS UDP frame.
+ * @param data
+ */
+ static parseUDPFrame(data) {
+ const buff = smart_buffer_1.SmartBuffer.fromBuffer(data);
+ buff.readOffset = 2;
+ const frameNumber = buff.readUInt8();
+ const hostType = buff.readUInt8();
+ let remoteHost;
+ if (hostType === constants_1.Socks5HostType.IPv4) {
+ remoteHost = ip.fromLong(buff.readUInt32BE());
+ }
+ else if (hostType === constants_1.Socks5HostType.IPv6) {
+ remoteHost = ip.toString(buff.readBuffer(16));
+ }
+ else {
+ remoteHost = buff.readString(buff.readUInt8());
+ }
+ const remotePort = buff.readUInt16BE();
+ return {
+ frameNumber,
+ remoteHost: {
+ host: remoteHost,
+ port: remotePort
},
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
+ data: buff.readBuffer()
+ };
+ }
+ /**
+ * Gets the SocksClient internal state.
+ */
+ get state() {
+ return this._state;
+ }
+ /**
+ * Internal state setter. If the SocksClient is in an error state, it cannot be changed to a non error state.
+ */
+ set state(newState) {
+ if (this._state !== constants_1.SocksClientState.Error) {
+ this._state = newState;
+ }
+ }
+ /**
+ * Starts the connection establishment to the proxy and destination.
+ * @param existing_socket Connected socket to use instead of creating a new one (internal use).
+ */
+ connect(existing_socket) {
+ this._onDataReceived = (data) => this.onDataReceived(data);
+ this._onClose = () => this.onClose();
+ this._onError = (err) => this.onError(err);
+ this._onConnect = () => this.onConnect();
+ // Start timeout timer (defaults to 30 seconds)
+ const timer = setTimeout(() => this.onEstablishedTimeout(), this._options.timeout || constants_1.DEFAULT_TIMEOUT);
+ // check whether unref is available as it differs from browser to NodeJS (#33)
+ if (timer.unref && typeof timer.unref === 'function') {
+ timer.unref();
+ }
+ // If an existing socket is provided, use it to negotiate SOCKS handshake. Otherwise create a new Socket.
+ if (existing_socket) {
+ this._socket = existing_socket;
+ }
+ else {
+ this._socket = new net.Socket();
+ }
+ // Attach Socket error handlers.
+ this._socket.once('close', this._onClose);
+ this._socket.once('error', this._onError);
+ this._socket.once('connect', this._onConnect);
+ this._socket.on('data', this._onDataReceived);
+ this.state = constants_1.SocksClientState.Connecting;
+ this._receiveBuffer = new receivebuffer_1.ReceiveBuffer();
+ if (existing_socket) {
+ this._socket.emit('connect');
+ }
+ else {
+ this._socket.connect(this.getSocketOptions());
+ if (this._options.set_tcp_nodelay !== undefined &&
+ this._options.set_tcp_nodelay !== null) {
+ this._socket.setNoDelay(!!this._options.set_tcp_nodelay);
+ }
+ }
+ // Listen for established event so we can re-emit any excess data received during handshakes.
+ this.prependOnceListener('established', info => {
+ setImmediate(() => {
+ if (this._receiveBuffer.length > 0) {
+ const excessData = this._receiveBuffer.get(this._receiveBuffer.length);
+ info.socket.emit('data', excessData);
}
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
+ info.socket.resume();
+ });
+ });
+ }
+ // Socket options (defaults host/port to options.proxy.host/options.proxy.port)
+ getSocketOptions() {
+ return Object.assign(Object.assign({}, this._options.socket_options), { host: this._options.proxy.host || this._options.proxy.ipaddress, port: this._options.proxy.port });
+ }
+ /**
+ * Handles internal Socks timeout callback.
+ * Note: If the Socks client is not BoundWaitingForConnection or Established, the connection will be closed.
+ */
+ onEstablishedTimeout() {
+ if (this.state !== constants_1.SocksClientState.Established &&
+ this.state !== constants_1.SocksClientState.BoundWaitingForConnection) {
+ this._closeSocket(constants_1.ERRORS.ProxyConnectionTimedOut);
+ }
+ }
+ /**
+ * Handles Socket connect event.
+ */
+ onConnect() {
+ this.state = constants_1.SocksClientState.Connected;
+ // Send initial handshake.
+ if (this._options.proxy.type === 4) {
+ this.sendSocks4InitialHandshake();
+ }
+ else {
+ this.sendSocks5InitialHandshake();
+ }
+ this.state = constants_1.SocksClientState.SentInitialHandshake;
+ }
+ /**
+ * Handles Socket data event.
+ * @param data
+ */
+ onDataReceived(data) {
+ /*
+ All received data is appended to a ReceiveBuffer.
+ This makes sure that all the data we need is received before we attempt to process it.
+ */
+ this._receiveBuffer.append(data);
+ // Process data that we have.
+ this.processData();
+ }
+ /**
+ * Handles processing of the data we have received.
+ */
+ processData() {
+ // If we have enough data to process the next step in the SOCKS handshake, proceed.
+ if (this._receiveBuffer.length >= this._nextRequiredPacketBufferSize) {
+ // Sent initial handshake, waiting for response.
+ if (this.state === constants_1.SocksClientState.SentInitialHandshake) {
+ if (this._options.proxy.type === 4) {
+ // Socks v4 only has one handshake response.
+ this.handleSocks4FinalHandshakeResponse();
}
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
+ else {
+ // Socks v5 has two handshakes, handle initial one here.
+ this.handleInitialSocks5HandshakeResponse();
}
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
+ // Sent auth request for Socks v5, waiting for response.
+ }
+ else if (this.state === constants_1.SocksClientState.SentAuthentication) {
+ this.handleInitialSocks5AuthenticationHandshakeResponse();
+ // Sent final Socks v5 handshake, waiting for final response.
+ }
+ else if (this.state === constants_1.SocksClientState.SentFinalHandshake) {
+ this.handleSocks5FinalHandshakeResponse();
+ // Socks BIND established. Waiting for remote connection via proxy.
+ }
+ else if (this.state === constants_1.SocksClientState.BoundWaitingForConnection) {
+ if (this._options.proxy.type === 4) {
+ this.handleSocks4IncomingConnectionResponse();
}
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
+ else {
+ this.handleSocks5IncomingConnectionResponse();
}
}
+ else if (this.state === constants_1.SocksClientState.Established) {
+ // do nothing (prevents closing of the socket)
+ }
+ else {
+ this._closeSocket(constants_1.ERRORS.InternalError);
+ }
}
}
-};
-var BlobReleaseLeaseHeaders = {
- serializedName: "blob-releaselease-headers",
- type: {
- name: "Composite",
- className: "BlobReleaseLeaseHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
+ /**
+ * Handles Socket close event.
+ * @param had_error
+ */
+ onClose() {
+ this._closeSocket(constants_1.ERRORS.SocketClosed);
+ }
+ /**
+ * Handles Socket error event.
+ * @param err
+ */
+ onError(err) {
+ this._closeSocket(err.message);
+ }
+ /**
+ * Removes internal event listeners on the underlying Socket.
+ */
+ removeInternalSocketHandlers() {
+ // Pauses data flow of the socket (this is internally resumed after 'established' is emitted)
+ this._socket.pause();
+ this._socket.removeListener('data', this._onDataReceived);
+ this._socket.removeListener('close', this._onClose);
+ this._socket.removeListener('error', this._onError);
+ this._socket.removeListener('connect', this.onConnect);
+ }
+ /**
+ * Closes and destroys the underlying Socket. Emits an error event.
+ * @param err { String } An error string to include in error event.
+ */
+ _closeSocket(err) {
+ // Make sure only one 'error' event is fired for the lifetime of this SocksClient instance.
+ if (this.state !== constants_1.SocksClientState.Error) {
+ // Set internal state to Error.
+ this.state = constants_1.SocksClientState.Error;
+ // Destroy Socket
+ this._socket.destroy();
+ // Remove internal listeners
+ this.removeInternalSocketHandlers();
+ // Fire 'error' event.
+ this.emit('error', new util_1.SocksClientError(err, this._options));
+ }
+ }
+ /**
+ * Sends initial Socks v4 handshake request.
+ */
+ sendSocks4InitialHandshake() {
+ const userId = this._options.proxy.userId || '';
+ const buff = new smart_buffer_1.SmartBuffer();
+ buff.writeUInt8(0x04);
+ buff.writeUInt8(constants_1.SocksCommand[this._options.command]);
+ buff.writeUInt16BE(this._options.destination.port);
+ // Socks 4 (IPv4)
+ if (net.isIPv4(this._options.destination.host)) {
+ buff.writeBuffer(ip.toBuffer(this._options.destination.host));
+ buff.writeStringNT(userId);
+ // Socks 4a (hostname)
+ }
+ else {
+ buff.writeUInt8(0x00);
+ buff.writeUInt8(0x00);
+ buff.writeUInt8(0x00);
+ buff.writeUInt8(0x01);
+ buff.writeStringNT(userId);
+ buff.writeStringNT(this._options.destination.host);
+ }
+ this._nextRequiredPacketBufferSize =
+ constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks4Response;
+ this._socket.write(buff.toBuffer());
+ }
+ /**
+ * Handles Socks v4 handshake response.
+ * @param data
+ */
+ handleSocks4FinalHandshakeResponse() {
+ const data = this._receiveBuffer.get(8);
+ if (data[1] !== constants_1.Socks4Response.Granted) {
+ this._closeSocket(`${constants_1.ERRORS.Socks4ProxyRejectedConnection} - (${constants_1.Socks4Response[data[1]]})`);
+ }
+ else {
+ // Bind response
+ if (constants_1.SocksCommand[this._options.command] === constants_1.SocksCommand.bind) {
+ const buff = smart_buffer_1.SmartBuffer.fromBuffer(data);
+ buff.readOffset = 2;
+ const remoteHost = {
+ port: buff.readUInt16BE(),
+ host: ip.fromLong(buff.readUInt32BE())
+ };
+ // If host is 0.0.0.0, set to proxy host.
+ if (remoteHost.host === '0.0.0.0') {
+ remoteHost.host = this._options.proxy.ipaddress;
}
+ this.state = constants_1.SocksClientState.BoundWaitingForConnection;
+ this.emit('bound', { socket: this._socket, remoteHost });
+ // Connect response
+ }
+ else {
+ this.state = constants_1.SocksClientState.Established;
+ this.removeInternalSocketHandlers();
+ this.emit('established', { socket: this._socket });
}
}
}
-};
-var BlobRenewLeaseHeaders = {
- serializedName: "blob-renewlease-headers",
- type: {
- name: "Composite",
- className: "BlobRenewLeaseHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- leaseId: {
- serializedName: "x-ms-lease-id",
- type: {
- name: "String"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
+ /**
+ * Handles Socks v4 incoming connection request (BIND)
+ * @param data
+ */
+ handleSocks4IncomingConnectionResponse() {
+ const data = this._receiveBuffer.get(8);
+ if (data[1] !== constants_1.Socks4Response.Granted) {
+ this._closeSocket(`${constants_1.ERRORS.Socks4ProxyRejectedIncomingBoundConnection} - (${constants_1.Socks4Response[data[1]]})`);
+ }
+ else {
+ const buff = smart_buffer_1.SmartBuffer.fromBuffer(data);
+ buff.readOffset = 2;
+ const remoteHost = {
+ port: buff.readUInt16BE(),
+ host: ip.fromLong(buff.readUInt32BE())
+ };
+ this.state = constants_1.SocksClientState.Established;
+ this.removeInternalSocketHandlers();
+ this.emit('established', { socket: this._socket, remoteHost });
+ }
+ }
+ /**
+ * Sends initial Socks v5 handshake request.
+ */
+ sendSocks5InitialHandshake() {
+ const buff = new smart_buffer_1.SmartBuffer();
+ buff.writeUInt8(0x05);
+ // We should only tell the proxy we support user/pass auth if auth info is actually provided.
+ // Note: As of Tor v0.3.5.7+, if user/pass auth is an option from the client, by default it will always take priority.
+ if (this._options.proxy.userId || this._options.proxy.password) {
+ buff.writeUInt8(2);
+ buff.writeUInt8(constants_1.Socks5Auth.NoAuth);
+ buff.writeUInt8(constants_1.Socks5Auth.UserPass);
+ }
+ else {
+ buff.writeUInt8(1);
+ buff.writeUInt8(constants_1.Socks5Auth.NoAuth);
+ }
+ this._nextRequiredPacketBufferSize =
+ constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5InitialHandshakeResponse;
+ this._socket.write(buff.toBuffer());
+ this.state = constants_1.SocksClientState.SentInitialHandshake;
+ }
+ /**
+ * Handles initial Socks v5 handshake response.
+ * @param data
+ */
+ handleInitialSocks5HandshakeResponse() {
+ const data = this._receiveBuffer.get(2);
+ if (data[0] !== 0x05) {
+ this._closeSocket(constants_1.ERRORS.InvalidSocks5IntiailHandshakeSocksVersion);
+ }
+ else if (data[1] === 0xff) {
+ this._closeSocket(constants_1.ERRORS.InvalidSocks5InitialHandshakeNoAcceptedAuthType);
+ }
+ else {
+ // If selected Socks v5 auth method is no auth, send final handshake request.
+ if (data[1] === constants_1.Socks5Auth.NoAuth) {
+ this.sendSocks5CommandRequest();
+ // If selected Socks v5 auth method is user/password, send auth handshake.
+ }
+ else if (data[1] === constants_1.Socks5Auth.UserPass) {
+ this.sendSocks5UserPassAuthentication();
+ }
+ else {
+ this._closeSocket(constants_1.ERRORS.InvalidSocks5InitialHandshakeUnknownAuthType);
}
}
}
-};
-var BlobChangeLeaseHeaders = {
- serializedName: "blob-changelease-headers",
- type: {
- name: "Composite",
- className: "BlobChangeLeaseHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
+ /**
+ * Sends Socks v5 user & password auth handshake.
+ *
+ * Note: No auth and user/pass are currently supported.
+ */
+ sendSocks5UserPassAuthentication() {
+ const userId = this._options.proxy.userId || '';
+ const password = this._options.proxy.password || '';
+ const buff = new smart_buffer_1.SmartBuffer();
+ buff.writeUInt8(0x01);
+ buff.writeUInt8(Buffer.byteLength(userId));
+ buff.writeString(userId);
+ buff.writeUInt8(Buffer.byteLength(password));
+ buff.writeString(password);
+ this._nextRequiredPacketBufferSize =
+ constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5UserPassAuthenticationResponse;
+ this._socket.write(buff.toBuffer());
+ this.state = constants_1.SocksClientState.SentAuthentication;
+ }
+ /**
+ * Handles Socks v5 auth handshake response.
+ * @param data
+ */
+ handleInitialSocks5AuthenticationHandshakeResponse() {
+ this.state = constants_1.SocksClientState.ReceivedAuthenticationResponse;
+ const data = this._receiveBuffer.get(2);
+ if (data[1] !== 0x00) {
+ this._closeSocket(constants_1.ERRORS.Socks5AuthenticationFailed);
+ }
+ else {
+ this.sendSocks5CommandRequest();
+ }
+ }
+ /**
+ * Sends Socks v5 final handshake request.
+ */
+ sendSocks5CommandRequest() {
+ const buff = new smart_buffer_1.SmartBuffer();
+ buff.writeUInt8(0x05);
+ buff.writeUInt8(constants_1.SocksCommand[this._options.command]);
+ buff.writeUInt8(0x00);
+ // ipv4, ipv6, domain?
+ if (net.isIPv4(this._options.destination.host)) {
+ buff.writeUInt8(constants_1.Socks5HostType.IPv4);
+ buff.writeBuffer(ip.toBuffer(this._options.destination.host));
+ }
+ else if (net.isIPv6(this._options.destination.host)) {
+ buff.writeUInt8(constants_1.Socks5HostType.IPv6);
+ buff.writeBuffer(ip.toBuffer(this._options.destination.host));
+ }
+ else {
+ buff.writeUInt8(constants_1.Socks5HostType.Hostname);
+ buff.writeUInt8(this._options.destination.host.length);
+ buff.writeString(this._options.destination.host);
+ }
+ buff.writeUInt16BE(this._options.destination.port);
+ this._nextRequiredPacketBufferSize =
+ constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHeader;
+ this._socket.write(buff.toBuffer());
+ this.state = constants_1.SocksClientState.SentFinalHandshake;
+ }
+ /**
+ * Handles Socks v5 final handshake response.
+ * @param data
+ */
    handleSocks5FinalHandshakeResponse() {
        // Peek at available data (we need at least 5 bytes to get the hostname length)
        const header = this._receiveBuffer.peek(5);
        if (header[0] !== 0x05 || header[1] !== constants_1.Socks5Response.Granted) {
            // Wrong protocol version or the proxy refused the command.
            this._closeSocket(`${constants_1.ERRORS.InvalidSocks5FinalHandshakeRejected} - ${constants_1.Socks5Response[header[1]]}`);
        }
        else {
            // Read address type
            const addressType = header[3];
            let remoteHost;
            let buff;
            // IPv4
            if (addressType === constants_1.Socks5HostType.IPv4) {
                // Check if data is available.
                const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv4;
                if (this._receiveBuffer.length < dataNeeded) {
                    // Not enough bytes buffered yet; record how many we need and
                    // bail out until the socket delivers more.
                    this._nextRequiredPacketBufferSize = dataNeeded;
                    return;
                }
                // Slice at 4 to skip the fixed header and read address + port.
                buff = smart_buffer_1.SmartBuffer.fromBuffer(this._receiveBuffer.get(dataNeeded).slice(4));
                remoteHost = {
                    host: ip.fromLong(buff.readUInt32BE()),
                    port: buff.readUInt16BE()
                };
                // If given host is 0.0.0.0, assume remote proxy ip instead.
                if (remoteHost.host === '0.0.0.0') {
                    remoteHost.host = this._options.proxy.ipaddress;
                }
                // Hostname
            }
            else if (addressType === constants_1.Socks5HostType.Hostname) {
                const hostLength = header[4];
                const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHostname(hostLength); // header + host length + host + port
                // Check if data is available.
                if (this._receiveBuffer.length < dataNeeded) {
                    this._nextRequiredPacketBufferSize = dataNeeded;
                    return;
                }
                buff = smart_buffer_1.SmartBuffer.fromBuffer(this._receiveBuffer.get(dataNeeded).slice(5) // Slice at 5 to skip host length
                );
                remoteHost = {
                    host: buff.readString(hostLength),
                    port: buff.readUInt16BE()
                };
                // IPv6
            }
            else if (addressType === constants_1.Socks5HostType.IPv6) {
                // Check if data is available.
                const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv6;
                if (this._receiveBuffer.length < dataNeeded) {
                    this._nextRequiredPacketBufferSize = dataNeeded;
                    return;
                }
                buff = smart_buffer_1.SmartBuffer.fromBuffer(this._receiveBuffer.get(dataNeeded).slice(4));
                remoteHost = {
                    host: ip.toString(buff.readBuffer(16)),
                    port: buff.readUInt16BE()
                };
            }
            // We have everything we need
            // NOTE(review): if addressType matches none of the three branches,
            // remoteHost stays undefined and we still fall through here --
            // confirm whether an unknown address type should close the socket.
            this.state = constants_1.SocksClientState.ReceivedFinalResponse;
            // If using CONNECT, the client is now in the established state.
            if (constants_1.SocksCommand[this._options.command] === constants_1.SocksCommand.connect) {
                this.state = constants_1.SocksClientState.Established;
                this.removeInternalSocketHandlers();
                this.emit('established', { socket: this._socket });
            }
            else if (constants_1.SocksCommand[this._options.command] === constants_1.SocksCommand.bind) {
                /* If using BIND, the Socks client is now in BoundWaitingForConnection state.
                This means that the remote proxy server is waiting for a remote connection to the bound port. */
                this.state = constants_1.SocksClientState.BoundWaitingForConnection;
                this._nextRequiredPacketBufferSize =
                    constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHeader;
                this.emit('bound', { socket: this._socket, remoteHost });
                /*
                If using Associate, the Socks client is now Established. And the proxy server is now accepting UDP packets at the
                given bound port. This initial Socks TCP connection must remain open for the UDP relay to continue to work.
                */
            }
            else if (constants_1.SocksCommand[this._options.command] === constants_1.SocksCommand.associate) {
                this.state = constants_1.SocksClientState.Established;
                this.removeInternalSocketHandlers();
                this.emit('established', { socket: this._socket, remoteHost });
            }
        }
    }
+ /**
+ * Handles Socks v5 incoming connection request (BIND).
+ */
    handleSocks5IncomingConnectionResponse() {
        // Peek at available data (we need at least 5 bytes to get the hostname length)
        const header = this._receiveBuffer.peek(5);
        if (header[0] !== 0x05 || header[1] !== constants_1.Socks5Response.Granted) {
            // Wrong protocol version or the proxy rejected the inbound connection.
            this._closeSocket(`${constants_1.ERRORS.Socks5ProxyRejectedIncomingBoundConnection} - ${constants_1.Socks5Response[header[1]]}`);
        }
        else {
            // Read address type
            const addressType = header[3];
            let remoteHost;
            let buff;
            // IPv4
            if (addressType === constants_1.Socks5HostType.IPv4) {
                // Check if data is available.
                const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv4;
                if (this._receiveBuffer.length < dataNeeded) {
                    // Not enough bytes buffered yet; wait for more socket data.
                    this._nextRequiredPacketBufferSize = dataNeeded;
                    return;
                }
                // Slice at 4 to skip the fixed header and read address + port.
                buff = smart_buffer_1.SmartBuffer.fromBuffer(this._receiveBuffer.get(dataNeeded).slice(4));
                remoteHost = {
                    host: ip.fromLong(buff.readUInt32BE()),
                    port: buff.readUInt16BE()
                };
                // If given host is 0.0.0.0, assume remote proxy ip instead.
                if (remoteHost.host === '0.0.0.0') {
                    remoteHost.host = this._options.proxy.ipaddress;
                }
                // Hostname
            }
            else if (addressType === constants_1.Socks5HostType.Hostname) {
                const hostLength = header[4];
                const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseHostname(hostLength); // header + host length + port
                // Check if data is available.
                if (this._receiveBuffer.length < dataNeeded) {
                    this._nextRequiredPacketBufferSize = dataNeeded;
                    return;
                }
                buff = smart_buffer_1.SmartBuffer.fromBuffer(this._receiveBuffer.get(dataNeeded).slice(5) // Slice at 5 to skip host length
                );
                remoteHost = {
                    host: buff.readString(hostLength),
                    port: buff.readUInt16BE()
                };
                // IPv6
            }
            else if (addressType === constants_1.Socks5HostType.IPv6) {
                // Check if data is available.
                const dataNeeded = constants_1.SOCKS_INCOMING_PACKET_SIZES.Socks5ResponseIPv6;
                if (this._receiveBuffer.length < dataNeeded) {
                    this._nextRequiredPacketBufferSize = dataNeeded;
                    return;
                }
                buff = smart_buffer_1.SmartBuffer.fromBuffer(this._receiveBuffer.get(dataNeeded).slice(4));
                remoteHost = {
                    host: ip.toString(buff.readBuffer(16)),
                    port: buff.readUInt16BE()
                };
            }
            // The remote peer connected to the bound port; the BIND session is
            // now established.
            this.state = constants_1.SocksClientState.Established;
            this.removeInternalSocketHandlers();
            this.emit('established', { socket: this._socket, remoteHost });
        }
    }
-};
-var BlobBreakLeaseHeaders = {
- serializedName: "blob-breaklease-headers",
- type: {
- name: "Composite",
- className: "BlobBreakLeaseHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- leaseTime: {
- serializedName: "x-ms-lease-time",
- type: {
- name: "Number"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
+ get socksClientOptions() {
+ return Object.assign({}, this._options);
}
-};
-var BlobCreateSnapshotHeaders = {
- serializedName: "blob-createsnapshot-headers",
- type: {
- name: "Composite",
- className: "BlobCreateSnapshotHeaders",
- modelProperties: {
- snapshot: {
- serializedName: "x-ms-snapshot",
- type: {
- name: "String"
- }
- },
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
- type: {
- name: "String"
- }
- },
- versionId: {
- serializedName: "x-ms-version-id",
- type: {
- name: "String"
- }
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- isServerEncrypted: {
- serializedName: "x-ms-request-server-encrypted",
- type: {
- name: "Boolean"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
- }
- }
+}
+exports.SocksClient = SocksClient;
+//# sourceMappingURL=socksclient.js.map
+
+/***/ }),
+/* 360 */,
+/* 361 */,
+/* 362 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var util = __webpack_require__(669)
+var messages = __webpack_require__(132)
+
+module.exports = function() {
+ var args = Array.prototype.slice.call(arguments, 0)
+ var warningName = args.shift()
+ if (warningName == "typo") {
+ return makeTypoWarning.apply(null,args)
+ }
+ else {
+ var msgTemplate = messages[warningName] ? messages[warningName] : warningName + ": '%s'"
+ args.unshift(msgTemplate)
+ return util.format.apply(null, args)
+ }
+}
+
// Builds a "did you mean?" warning from the typo template. When a field name
// is supplied, both names are qualified as field['name'].
function makeTypoWarning (providedName, probableName, field) {
  let given = providedName
  let suggestion = probableName
  if (field) {
    given = field + "['" + given + "']"
    suggestion = field + "['" + suggestion + "']"
  }
  return util.format(messages.typo, given, suggestion)
}
+
+
+/***/ }),
+/* 363 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
// Bundled bluebird internals: implements Promise.reduce / Promise.prototype.reduce
// (and the accumulator machinery shared with Promise.each) on top of the
// injected bluebird core objects.
module.exports = function(Promise,
                          PromiseArray,
                          apiRejection,
                          tryConvertToPromise,
                          INTERNAL,
                          debug) {
var util = __webpack_require__(248);
var tryCatch = util.tryCatch;

// PromiseArray subclass that threads an accumulator through the inputs one at
// a time. `_each` distinguishes the entry points:
//   INTERNAL  -> each-mode, collecting every resolved value in _eachValues
//   0         -> each-mode that discards values (_eachValues = null)
//   otherwise -> plain reduce (_eachValues = undefined)
function ReductionPromiseArray(promises, fn, initialValue, _each) {
    this.constructor$(promises);
    var context = Promise._getContext();
    this._fn = util.contextBind(context, fn);
    if (initialValue !== undefined) {
        // Normalize the seed to a promise and let its cancellation cancel us.
        initialValue = Promise.resolve(initialValue);
        initialValue._attachCancellationCallback(this);
    }
    this._initialValue = initialValue;
    this._currentCancellable = null;
    if(_each === INTERNAL) {
        this._eachValues = Array(this._length);
    } else if (_each === 0) {
        this._eachValues = null;
    } else {
        this._eachValues = undefined;
    }
    this._promise._captureStackTrace();
    // NOTE(review): -5 is a bluebird-internal _init$ flag; confirm its meaning
    // against the PromiseArray implementation before changing it.
    this._init$(undefined, -5);
}
util.inherits(ReductionPromiseArray, PromiseArray);

// Records an accumulator value while in each-mode (skips the INTERNAL marker).
ReductionPromiseArray.prototype._gotAccum = function(accum) {
    if (this._eachValues !== undefined &&
        this._eachValues !== null &&
        accum !== INTERNAL) {
        this._eachValues.push(accum);
    }
};

// Final step for each-mode: append the last value and yield the collected list.
ReductionPromiseArray.prototype._eachComplete = function(value) {
    if (this._eachValues !== null) {
        this._eachValues.push(value);
    }
    return this._eachValues;
};

// Initialization is driven by _iterate below, not the base-class hook.
ReductionPromiseArray.prototype._init = function() {};

// Empty input: resolve with the collected values (each) or the seed (reduce).
ReductionPromiseArray.prototype._resolveEmptyArray = function() {
    this._resolve(this._eachValues !== undefined ? this._eachValues
                                                 : this._initialValue);
};

// The values array is consumed in place; no defensive copy is needed.
ReductionPromiseArray.prototype.shouldCopyValues = function () {
    return false;
};

ReductionPromiseArray.prototype._resolve = function(value) {
    this._promise._resolveCallback(value);
    this._values = null;
};

// Propagates cancellation to whichever link of the chain is currently pending,
// and to the (promise-wrapped) initial value.
ReductionPromiseArray.prototype._resultCancelled = function(sender) {
    if (sender === this._initialValue) return this._cancel();
    if (this._isResolved()) return;
    this._resultCancelled$();
    if (this._currentCancellable instanceof Promise) {
        this._currentCancellable.cancel();
    }
    if (this._initialValue instanceof Promise) {
        this._initialValue.cancel();
    }
};

// Builds the sequential accumulator chain over the input values.
ReductionPromiseArray.prototype._iterate = function (values) {
    this._values = values;
    var value;
    var i;
    var length = values.length;
    // Without a seed, the first input becomes the initial accumulator.
    if (this._initialValue !== undefined) {
        value = this._initialValue;
        i = 0;
    } else {
        value = Promise.resolve(values[0]);
        i = 1;
    }

    this._currentCancellable = value;

    // Mark the remaining inputs as handled so they don't report unhandled
    // rejections while waiting for their turn in the chain.
    for (var j = i; j < length; ++j) {
        var maybePromise = values[j];
        if (maybePromise instanceof Promise) {
            maybePromise.suppressUnhandledRejections();
        }
    }

    if (!value.isRejected()) {
        for (; i < length; ++i) {
            var ctx = {
                accum: null,
                value: values[i],
                index: i,
                length: length,
                array: this
            };

            value = value._then(gotAccum, undefined, undefined, ctx, undefined);

            // NOTE(review): every 128th link drops the async-scheduling
            // guarantee, presumably to bound scheduler queue growth for long
            // chains -- confirm against bluebird's scheduler before relying on it.
            if ((i & 127) === 0) {
                value._setNoAsyncGuarantee();
            }
        }
    }

    if (this._eachValues !== undefined) {
        value = value
            ._then(this._eachComplete, undefined, undefined, this, undefined);
    }
    value._then(completed, completed, undefined, value, this);
};

Promise.prototype.reduce = function (fn, initialValue) {
    return reduce(this, fn, initialValue, null);
};

Promise.reduce = function (promises, fn, initialValue, _each) {
    return reduce(promises, fn, initialValue, _each);
};

// Terminal handler: `this` is the last chain promise; forwards its settled
// value (or rejection reason) to the array's own promise.
function completed(valueOrReason, array) {
    if (this.isFulfilled()) {
        array._resolve(valueOrReason);
    } else {
        array._reject(valueOrReason);
    }
}

// Shared entry point for reduce/each; validates the callback up front.
function reduce(promises, fn, initialValue, _each) {
    if (typeof fn !== "function") {
        return apiRejection("expecting a function but got " + util.classString(fn));
    }
    var array = new ReductionPromiseArray(promises, fn, initialValue, _each);
    return array.promise();
}

// Receives the settled accumulator for one step, then resolves that step's
// input value before invoking the user callback.
function gotAccum(accum) {
    this.accum = accum;
    this.array._gotAccum(accum);
    var value = tryConvertToPromise(this.value, this.array._promise);
    if (value instanceof Promise) {
        this.array._currentCancellable = value;
        return value._then(gotValue, undefined, undefined, this, undefined);
    } else {
        return gotValue.call(this, value);
    }
}

// Invokes the user callback -- (value, index, length) in each-mode, or
// (accum, value, index, length) for reduce -- and tracks any returned promise
// for cancellation purposes.
function gotValue(value) {
    var array = this.array;
    var promise = array._promise;
    var fn = tryCatch(array._fn);
    promise._pushContext();
    var ret;
    if (array._eachValues !== undefined) {
        ret = fn.call(promise._boundValue(), value, this.index, this.length);
    } else {
        ret = fn.call(promise._boundValue(),
                this.accum, value, this.index, this.length);
    }
    if (ret instanceof Promise) {
        array._currentCancellable = ret;
    }
    var promiseCreated = promise._popContext();
    debug.checkForgottenReturns(
        ret,
        promiseCreated,
        array._eachValues !== undefined ? "Promise.each" : "Promise.reduce",
        promise
    );
    return ret;
}
};
+
+
+/***/ }),
+/* 364 */
+/***/ (function(module) {
+
+"use strict";
+
+
+module.exports = (flag, argv = process.argv) => {
+ const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');
+ const position = argv.indexOf(prefix + flag);
+ const terminatorPosition = argv.indexOf('--');
+ return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition);
+};
+
+
+/***/ }),
+/* 365 */,
+/* 366 */
+/***/ (function(module) {
+
// Rounds n up to the next power of two; values that are already powers of two
// are returned unchanged.
var twoify = function (n) {
  if (n && (n & (n - 1)) === 0) return n
  var pow = 1
  while (pow < n) pow <<= 1
  return pow
}

// Fixed-capacity circular buffer addressed by integer index (index & mask).
// Callable with or without `new`.
var Cyclist = function (size) {
  if (!(this instanceof Cyclist)) return new Cyclist(size)
  var capacity = twoify(size)
  this.mask = capacity - 1
  this.size = capacity
  this.values = new Array(capacity)
}

// Stores val at the slot for index and returns that slot.
Cyclist.prototype.put = function (index, val) {
  var slot = index & this.mask
  this.values[slot] = val
  return slot
}

// Returns the value stored at the slot for index.
Cyclist.prototype.get = function (index) {
  return this.values[index & this.mask]
}

// Clears the slot for index and returns the value that was there.
Cyclist.prototype.del = function (index) {
  var slot = index & this.mask
  var previous = this.values[slot]
  this.values[slot] = undefined
  return previous
}
+
+module.exports = Cyclist
+
+
+/***/ }),
+/* 367 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const compare = __webpack_require__(874)
// Strict semver less-than: true when version a sorts before version b.
function lt (a, b, loose) {
  return compare(a, b, loose) < 0
}
module.exports = lt
+
+
+/***/ }),
+/* 368 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const figgyPudding = __webpack_require__(965)
+const npa = __webpack_require__(482)
+const npmFetch = __webpack_require__(789)
+const semver = __webpack_require__(516)
+const url = __webpack_require__(835)
+
+const UnpublishConfig = figgyPudding({
+ force: { default: false },
+ Promise: { default: () => Promise }
+})
+
module.exports = unpublish
/**
 * Unpublishes a package -- or a single version of it -- from the registry.
 * Resolves to true whether anything was removed or the package was already
 * gone (E404 is swallowed).
 * @param spec package spec string (parsed with npa); a bare name or "*"
 *   version removes the entire package, a concrete version removes just it
 * @param opts figgy-pudding config (registry/auth, force, Promise)
 */
function unpublish (spec, opts) {
  opts = UnpublishConfig(opts)
  return new opts.Promise(resolve => resolve()).then(() => {
    spec = npa(spec)
    // NOTE: spec is used to pick the appropriate registry/auth combo.
    opts = opts.concat({ spec })
    const pkgUri = spec.escapedName
    // Fetch the full packument (write=true to get revision info).
    return npmFetch.json(pkgUri, opts.concat({
      query: { write: true }
    })).then(pkg => {
      if (!spec.rawSpec || spec.rawSpec === '*') {
        // No version (or wildcard) requested: delete the whole package.
        return npmFetch(`${pkgUri}/-rev/${pkg._rev}`, opts.concat({
          method: 'DELETE',
          ignoreBody: true
        }))
      } else {
        const version = spec.rawSpec
        const allVersions = pkg.versions || {}
        const versionPublic = allVersions[version]
        let dist
        if (versionPublic) {
          // Remember dist info so the tarball can be removed afterwards.
          dist = allVersions[version].dist
        }
        delete allVersions[version]
        // if it was the only version, then delete the whole package.
        if (!Object.keys(allVersions).length) {
          return npmFetch(`${pkgUri}/-rev/${pkg._rev}`, opts.concat({
            method: 'DELETE',
            ignoreBody: true
          }))
        } else if (versionPublic) {
          const latestVer = pkg['dist-tags'].latest
          // Drop every dist-tag that pointed at the removed version.
          Object.keys(pkg['dist-tags']).forEach(tag => {
            if (pkg['dist-tags'][tag] === version) {
              delete pkg['dist-tags'][tag]
            }
          })

          if (latestVer === version) {
            // "latest" pointed at the removed version: repoint it at the
            // highest remaining version (loose semver ordering).
            pkg['dist-tags'].latest = Object.keys(
              allVersions
            ).sort(semver.compareLoose).pop()
          }

          delete pkg._revisions
          delete pkg._attachments
          // Update packument with removed versions
          return npmFetch(`${pkgUri}/-rev/${pkg._rev}`, opts.concat({
            method: 'PUT',
            body: pkg,
            ignoreBody: true
          })).then(() => {
            // Remove the tarball itself
            return npmFetch.json(pkgUri, opts.concat({
              query: { write: true }
            })).then(({ _rev, _id }) => {
              // Re-fetch to get the post-PUT revision before deleting the tarball.
              const tarballUrl = url.parse(dist.tarball).pathname.substr(1)
              return npmFetch(`${tarballUrl}/-rev/${_rev}`, opts.concat({
                method: 'DELETE',
                ignoreBody: true
              }))
            })
          })
        }
      }
    }, err => {
      // A 404 means the package was never published (or is already gone);
      // treat that as success. Anything else is a real failure.
      if (err.code !== 'E404') {
        throw err
      }
    })
  }).then(() => true)
}
+
+
+/***/ }),
+/* 369 */,
+/* 370 */,
+/* 371 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
// Aggregated re-exports of the bundled stream utility helpers; each numeric id
// resolves to the corresponding module in this webpack bundle.
module.exports.pipe = __webpack_require__(284)
module.exports.each = __webpack_require__(137)
module.exports.pipeline = __webpack_require__(746)
module.exports.duplex = __webpack_require__(394)
module.exports.through = __webpack_require__(576)
module.exports.concat = __webpack_require__(596)
module.exports.finished = __webpack_require__(3)
module.exports.from = __webpack_require__(868)
module.exports.to = __webpack_require__(6)
module.exports.parallel = __webpack_require__(565)
+
+
+/***/ }),
+/* 372 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const util_1 = __webpack_require__(526);
+const constants_1 = __webpack_require__(583);
+const stream = __webpack_require__(794);
+/**
+ * Validates the provided SocksClientOptions
+ * @param options { SocksClientOptions }
+ * @param acceptedCommands { string[] } A list of accepted SocksProxy commands.
+ */
/**
 * Validates the provided SocksClientOptions, throwing a SocksClientError
 * for the first problem found.
 * @param options { SocksClientOptions }
 * @param acceptedCommands { string[] } A list of accepted SocksProxy commands.
 */
function validateSocksClientOptions(options, acceptedCommands = ['connect', 'bind', 'associate']) {
    const fail = (message) => {
        throw new util_1.SocksClientError(message, options);
    };
    // The command must be a known SOCKS command...
    if (!constants_1.SocksCommand[options.command]) {
        fail(constants_1.ERRORS.InvalidSocksCommand);
    }
    // ...and one the caller accepts for this operation.
    if (!acceptedCommands.includes(options.command)) {
        fail(constants_1.ERRORS.InvalidSocksCommandForOperation);
    }
    // Destination host/port must be well-formed.
    if (!isValidSocksRemoteHost(options.destination)) {
        fail(constants_1.ERRORS.InvalidSocksClientOptionsDestination);
    }
    // The proxy definition must be usable.
    if (!isValidSocksProxy(options.proxy)) {
        fail(constants_1.ERRORS.InvalidSocksClientOptionsProxy);
    }
    // Timeout, when given, must be a positive number.
    if (options.timeout && !isValidTimeoutValue(options.timeout)) {
        fail(constants_1.ERRORS.InvalidSocksClientOptionsTimeout);
    }
    // An existing socket, when given, must be a duplex stream.
    if (options.existing_socket &&
        !(options.existing_socket instanceof stream.Duplex)) {
        fail(constants_1.ERRORS.InvalidSocksClientOptionsExistingSocket);
    }
}
+exports.validateSocksClientOptions = validateSocksClientOptions;
+/**
+ * Validates the SocksClientChainOptions
+ * @param options { SocksClientChainOptions }
+ */
/**
 * Validates the SocksClientChainOptions, throwing a SocksClientError for the
 * first problem found.
 * @param options { SocksClientChainOptions }
 */
function validateSocksClientChainOptions(options) {
    // Only connect is supported when chaining.
    if (options.command !== 'connect') {
        throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksCommandChain, options);
    }
    // Destination host/port must be well-formed.
    if (!isValidSocksRemoteHost(options.destination)) {
        throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsDestination, options);
    }
    // A chain needs at least two proxies in an array.
    const proxies = options.proxies;
    const proxiesOk = proxies && Array.isArray(proxies) && proxies.length >= 2;
    if (!proxiesOk) {
        throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxiesLength, options);
    }
    // Every proxy in the chain must itself be valid.
    for (const proxy of proxies) {
        if (!isValidSocksProxy(proxy)) {
            throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsProxy, options);
        }
    }
    // Timeout, when given, must be a positive number.
    if (options.timeout && !isValidTimeoutValue(options.timeout)) {
        throw new util_1.SocksClientError(constants_1.ERRORS.InvalidSocksClientOptionsTimeout, options);
    }
}
+exports.validateSocksClientChainOptions = validateSocksClientChainOptions;
+/**
+ * Validates a SocksRemoteHost
+ * @param remoteHost { SocksRemoteHost }
+ */
/**
 * Checks that a SocksRemoteHost has a string host and a numeric port in the
 * range 0-65535. (Falsy inputs are passed through unchanged.)
 * @param remoteHost { SocksRemoteHost }
 */
function isValidSocksRemoteHost(remoteHost) {
    return (remoteHost &&
        typeof remoteHost.port === 'number' &&
        remoteHost.port >= 0 &&
        remoteHost.port <= 65535 &&
        typeof remoteHost.host === 'string');
}
+/**
+ * Validates a SocksProxy
+ * @param proxy { SocksProxy }
+ */
/**
 * Checks that a SocksProxy has a string host or ipaddress, a numeric port in
 * the range 0-65535, and a protocol type of 4 or 5. (Falsy inputs are passed
 * through unchanged.)
 * @param proxy { SocksProxy }
 */
function isValidSocksProxy(proxy) {
    const hasAddress = (p) => typeof p.host === 'string' || typeof p.ipaddress === 'string';
    const hasValidPort = (p) => typeof p.port === 'number' && p.port >= 0 && p.port <= 65535;
    const hasKnownType = (p) => p.type === 4 || p.type === 5;
    return proxy && hasAddress(proxy) && hasValidPort(proxy) && hasKnownType(proxy);
}
+/**
+ * Validates a timeout value.
+ * @param value { Number }
+ */
/**
 * Checks that a timeout is a positive number.
 * @param value { Number }
 */
function isValidTimeoutValue(value) {
    const isNumber = typeof value === 'number';
    return isNumber && value > 0;
}
+//# sourceMappingURL=helpers.js.map
+
+/***/ }),
+/* 373 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+var coreHttp = __webpack_require__(999);
+var tslib = __webpack_require__(815);
+var api = __webpack_require__(440);
+var logger$1 = __webpack_require__(492);
+var abortController = __webpack_require__(106);
+var os = __webpack_require__(87);
+var stream = __webpack_require__(794);
+__webpack_require__(242);
+var crypto = __webpack_require__(417);
+var coreLro = __webpack_require__(889);
+var events = __webpack_require__(614);
+var coreTracing = __webpack_require__(263);
+var fs = __webpack_require__(747);
+var util = __webpack_require__(669);
+
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for license information.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+var KeyInfo = {
+ serializedName: "KeyInfo",
+ type: {
+ name: "Composite",
+ className: "KeyInfo",
+ modelProperties: {
+ startsOn: {
+ xmlName: "Start",
+ required: true,
+ serializedName: "Start",
type: {
name: "String"
}
},
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- skuName: {
- serializedName: "x-ms-sku-name",
- type: {
- name: "Enum",
- allowedValues: [
- "Standard_LRS",
- "Standard_GRS",
- "Standard_RAGRS",
- "Standard_ZRS",
- "Premium_LRS"
- ]
- }
- },
- accountKind: {
- serializedName: "x-ms-account-kind",
- type: {
- name: "Enum",
- allowedValues: [
- "Storage",
- "BlobStorage",
- "StorageV2",
- "FileStorage",
- "BlockBlobStorage"
- ]
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
+ expiresOn: {
+ xmlName: "Expiry",
+ required: true,
+ serializedName: "Expiry",
type: {
name: "String"
}
@@ -36055,68 +32743,64 @@ var BlobGetAccountInfoHeaders = {
}
}
};
-var BlockBlobStageBlockHeaders = {
- serializedName: "blockblob-stageblock-headers",
+var UserDelegationKey = {
+ serializedName: "UserDelegationKey",
type: {
name: "Composite",
- className: "BlockBlobStageBlockHeaders",
+ className: "UserDelegationKey",
modelProperties: {
- contentMD5: {
- serializedName: "content-md5",
- type: {
- name: "ByteArray"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
+ signedObjectId: {
+ xmlName: "SignedOid",
+ required: true,
+ serializedName: "SignedOid",
type: {
name: "String"
}
},
- requestId: {
- serializedName: "x-ms-request-id",
+ signedTenantId: {
+ xmlName: "SignedTid",
+ required: true,
+ serializedName: "SignedTid",
type: {
name: "String"
}
},
- version: {
- serializedName: "x-ms-version",
+ signedStartsOn: {
+ xmlName: "SignedStart",
+ required: true,
+ serializedName: "SignedStart",
type: {
name: "String"
}
},
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- xMsContentCrc64: {
- serializedName: "x-ms-content-crc64",
- type: {
- name: "ByteArray"
- }
- },
- isServerEncrypted: {
- serializedName: "x-ms-request-server-encrypted",
+ signedExpiresOn: {
+ xmlName: "SignedExpiry",
+ required: true,
+ serializedName: "SignedExpiry",
type: {
- name: "Boolean"
+ name: "String"
}
},
- encryptionKeySha256: {
- serializedName: "x-ms-encryption-key-sha256",
+ signedService: {
+ xmlName: "SignedService",
+ required: true,
+ serializedName: "SignedService",
type: {
name: "String"
}
},
- encryptionScope: {
- serializedName: "x-ms-encryption-scope",
+ signedVersion: {
+ xmlName: "SignedVersion",
+ required: true,
+ serializedName: "SignedVersion",
type: {
name: "String"
}
},
- errorCode: {
- serializedName: "x-ms-error-code",
+ value: {
+ xmlName: "Value",
+ required: true,
+ serializedName: "Value",
type: {
name: "String"
}
@@ -36124,68 +32808,85 @@ var BlockBlobStageBlockHeaders = {
}
}
};
-var BlockBlobStageBlockFromURLHeaders = {
- serializedName: "blockblob-stageblockfromurl-headers",
+var StorageError = {
+ serializedName: "StorageError",
type: {
name: "Composite",
- className: "BlockBlobStageBlockFromURLHeaders",
+ className: "StorageError",
modelProperties: {
- contentMD5: {
- serializedName: "content-md5",
- type: {
- name: "ByteArray"
- }
- },
- xMsContentCrc64: {
- serializedName: "x-ms-content-crc64",
+ message: {
+ xmlName: "Message",
+ serializedName: "Message",
type: {
- name: "ByteArray"
+ name: "String"
}
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
+ }
+ }
+ }
+};
+var DataLakeStorageErrorError = {
+ serializedName: "DataLakeStorageError_error",
+ type: {
+ name: "Composite",
+ className: "DataLakeStorageErrorError",
+ modelProperties: {
+ code: {
+ xmlName: "Code",
+ serializedName: "Code",
type: {
name: "String"
}
},
- version: {
- serializedName: "x-ms-version",
+ message: {
+ xmlName: "Message",
+ serializedName: "Message",
type: {
name: "String"
}
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- isServerEncrypted: {
- serializedName: "x-ms-request-server-encrypted",
+ }
+ }
+ }
+};
+var DataLakeStorageError = {
+ serializedName: "DataLakeStorageError",
+ type: {
+ name: "Composite",
+ className: "DataLakeStorageError",
+ modelProperties: {
+ dataLakeStorageErrorDetails: {
+ xmlName: "error",
+ serializedName: "error",
type: {
- name: "Boolean"
+ name: "Composite",
+ className: "DataLakeStorageErrorError"
}
- },
- encryptionKeySha256: {
- serializedName: "x-ms-encryption-key-sha256",
+ }
+ }
+ }
+};
+var AccessPolicy = {
+ serializedName: "AccessPolicy",
+ type: {
+ name: "Composite",
+ className: "AccessPolicy",
+ modelProperties: {
+ startsOn: {
+ xmlName: "Start",
+ serializedName: "Start",
type: {
name: "String"
}
},
- encryptionScope: {
- serializedName: "x-ms-encryption-scope",
+ expiresOn: {
+ xmlName: "Expiry",
+ serializedName: "Expiry",
type: {
name: "String"
}
},
- errorCode: {
- serializedName: "x-ms-error-code",
+ permissions: {
+ xmlName: "Permission",
+ serializedName: "Permission",
type: {
name: "String"
}
@@ -36193,236 +32894,289 @@ var BlockBlobStageBlockFromURLHeaders = {
}
}
};
-var BlockBlobCommitBlockListHeaders = {
- serializedName: "blockblob-commitblocklist-headers",
+var BlobPropertiesInternal = {
+ xmlName: "Properties",
+ serializedName: "BlobPropertiesInternal",
type: {
name: "Composite",
- className: "BlockBlobCommitBlockListHeaders",
+ className: "BlobPropertiesInternal",
modelProperties: {
- etag: {
- serializedName: "etag",
+ createdOn: {
+ xmlName: "Creation-Time",
+ serializedName: "Creation-Time",
type: {
- name: "String"
+ name: "DateTimeRfc1123"
}
},
lastModified: {
- serializedName: "last-modified",
+ xmlName: "Last-Modified",
+ required: true,
+ serializedName: "Last-Modified",
type: {
name: "DateTimeRfc1123"
}
},
- contentMD5: {
- serializedName: "content-md5",
+ etag: {
+ xmlName: "Etag",
+ required: true,
+ serializedName: "Etag",
type: {
- name: "ByteArray"
+ name: "String"
}
},
- xMsContentCrc64: {
- serializedName: "x-ms-content-crc64",
+ contentLength: {
+ xmlName: "Content-Length",
+ serializedName: "Content-Length",
type: {
- name: "ByteArray"
+ name: "Number"
}
},
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
+ contentType: {
+ xmlName: "Content-Type",
+ serializedName: "Content-Type",
type: {
name: "String"
}
},
- requestId: {
- serializedName: "x-ms-request-id",
+ contentEncoding: {
+ xmlName: "Content-Encoding",
+ serializedName: "Content-Encoding",
type: {
name: "String"
}
},
- version: {
- serializedName: "x-ms-version",
+ contentLanguage: {
+ xmlName: "Content-Language",
+ serializedName: "Content-Language",
type: {
name: "String"
}
},
- versionId: {
- serializedName: "x-ms-version-id",
+ contentMD5: {
+ xmlName: "Content-MD5",
+ serializedName: "Content-MD5",
type: {
- name: "String"
+ name: "ByteArray"
}
},
- date: {
- serializedName: "date",
+ contentDisposition: {
+ xmlName: "Content-Disposition",
+ serializedName: "Content-Disposition",
type: {
- name: "DateTimeRfc1123"
+ name: "String"
}
},
- isServerEncrypted: {
- serializedName: "x-ms-request-server-encrypted",
+ cacheControl: {
+ xmlName: "Cache-Control",
+ serializedName: "Cache-Control",
type: {
- name: "Boolean"
+ name: "String"
}
},
- encryptionKeySha256: {
- serializedName: "x-ms-encryption-key-sha256",
+ blobSequenceNumber: {
+ xmlName: "x-ms-blob-sequence-number",
+ serializedName: "x-ms-blob-sequence-number",
type: {
- name: "String"
+ name: "Number"
}
},
- encryptionScope: {
- serializedName: "x-ms-encryption-scope",
+ blobType: {
+ xmlName: "BlobType",
+ serializedName: "BlobType",
type: {
- name: "String"
+ name: "Enum",
+ allowedValues: [
+ "BlockBlob",
+ "PageBlob",
+ "AppendBlob"
+ ]
}
},
- errorCode: {
- serializedName: "x-ms-error-code",
+ leaseStatus: {
+ xmlName: "LeaseStatus",
+ serializedName: "LeaseStatus",
type: {
- name: "String"
+ name: "Enum",
+ allowedValues: [
+ "locked",
+ "unlocked"
+ ]
}
- }
- }
- }
-};
-var BlockBlobGetBlockListHeaders = {
- serializedName: "blockblob-getblocklist-headers",
- type: {
- name: "Composite",
- className: "BlockBlobGetBlockListHeaders",
- modelProperties: {
- lastModified: {
- serializedName: "last-modified",
+ },
+ leaseState: {
+ xmlName: "LeaseState",
+ serializedName: "LeaseState",
type: {
- name: "DateTimeRfc1123"
+ name: "Enum",
+ allowedValues: [
+ "available",
+ "leased",
+ "expired",
+ "breaking",
+ "broken"
+ ]
}
},
- etag: {
- serializedName: "etag",
+ leaseDuration: {
+ xmlName: "LeaseDuration",
+ serializedName: "LeaseDuration",
type: {
- name: "String"
+ name: "Enum",
+ allowedValues: [
+ "infinite",
+ "fixed"
+ ]
}
},
- contentType: {
- serializedName: "content-type",
+ copyId: {
+ xmlName: "CopyId",
+ serializedName: "CopyId",
type: {
name: "String"
}
},
- blobContentLength: {
- serializedName: "x-ms-blob-content-length",
+ copyStatus: {
+ xmlName: "CopyStatus",
+ serializedName: "CopyStatus",
type: {
- name: "Number"
+ name: "Enum",
+ allowedValues: [
+ "pending",
+ "success",
+ "aborted",
+ "failed"
+ ]
}
},
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
+ copySource: {
+ xmlName: "CopySource",
+ serializedName: "CopySource",
type: {
name: "String"
}
},
- requestId: {
- serializedName: "x-ms-request-id",
+ copyProgress: {
+ xmlName: "CopyProgress",
+ serializedName: "CopyProgress",
type: {
name: "String"
}
},
- version: {
- serializedName: "x-ms-version",
+ copyCompletedOn: {
+ xmlName: "CopyCompletionTime",
+ serializedName: "CopyCompletionTime",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ copyStatusDescription: {
+ xmlName: "CopyStatusDescription",
+ serializedName: "CopyStatusDescription",
type: {
name: "String"
}
},
- date: {
- serializedName: "date",
+ serverEncrypted: {
+ xmlName: "ServerEncrypted",
+ serializedName: "ServerEncrypted",
type: {
- name: "DateTimeRfc1123"
+ name: "Boolean"
}
},
- errorCode: {
- serializedName: "x-ms-error-code",
+ incrementalCopy: {
+ xmlName: "IncrementalCopy",
+ serializedName: "IncrementalCopy",
type: {
- name: "String"
+ name: "Boolean"
}
- }
- }
- }
-};
-var PageBlobUploadPagesHeaders = {
- serializedName: "pageblob-uploadpages-headers",
- type: {
- name: "Composite",
- className: "PageBlobUploadPagesHeaders",
- modelProperties: {
- etag: {
- serializedName: "etag",
+ },
+ destinationSnapshot: {
+ xmlName: "DestinationSnapshot",
+ serializedName: "DestinationSnapshot",
type: {
name: "String"
}
},
- lastModified: {
- serializedName: "last-modified",
+ deletedOn: {
+ xmlName: "DeletedTime",
+ serializedName: "DeletedTime",
type: {
name: "DateTimeRfc1123"
}
},
- contentMD5: {
- serializedName: "content-md5",
+ remainingRetentionDays: {
+ xmlName: "RemainingRetentionDays",
+ serializedName: "RemainingRetentionDays",
type: {
- name: "ByteArray"
+ name: "Number"
}
},
- xMsContentCrc64: {
- serializedName: "x-ms-content-crc64",
+ accessTier: {
+ xmlName: "AccessTier",
+ serializedName: "AccessTier",
type: {
- name: "ByteArray"
+ name: "String"
}
},
- blobSequenceNumber: {
- serializedName: "x-ms-blob-sequence-number",
+ accessTierInferred: {
+ xmlName: "AccessTierInferred",
+ serializedName: "AccessTierInferred",
type: {
- name: "Number"
+ name: "Boolean"
}
},
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
+ archiveStatus: {
+ xmlName: "ArchiveStatus",
+ serializedName: "ArchiveStatus",
type: {
name: "String"
}
},
- requestId: {
- serializedName: "x-ms-request-id",
+ customerProvidedKeySha256: {
+ xmlName: "CustomerProvidedKeySha256",
+ serializedName: "CustomerProvidedKeySha256",
type: {
name: "String"
}
},
- version: {
- serializedName: "x-ms-version",
+ encryptionScope: {
+ xmlName: "EncryptionScope",
+ serializedName: "EncryptionScope",
type: {
name: "String"
}
},
- date: {
- serializedName: "date",
+ accessTierChangedOn: {
+ xmlName: "AccessTierChangeTime",
+ serializedName: "AccessTierChangeTime",
type: {
name: "DateTimeRfc1123"
}
},
- isServerEncrypted: {
- serializedName: "x-ms-request-server-encrypted",
+ tagCount: {
+ xmlName: "TagCount",
+ serializedName: "TagCount",
type: {
- name: "Boolean"
+ name: "Number"
}
},
- encryptionKeySha256: {
- serializedName: "x-ms-encryption-key-sha256",
+ expiresOn: {
+ xmlName: "Expiry-Time",
+ serializedName: "Expiry-Time",
type: {
- name: "String"
+ name: "DateTimeRfc1123"
}
},
- encryptionScope: {
- serializedName: "x-ms-encryption-scope",
+ isSealed: {
+ xmlName: "Sealed",
+ serializedName: "Sealed",
type: {
- name: "String"
+ name: "Boolean"
}
},
- errorCode: {
- serializedName: "x-ms-error-code",
+ rehydratePriority: {
+ xmlName: "RehydratePriority",
+ serializedName: "RehydratePriority",
type: {
name: "String"
}
@@ -36430,206 +33184,230 @@ var PageBlobUploadPagesHeaders = {
}
}
};
-var PageBlobClearPagesHeaders = {
- serializedName: "pageblob-clearpages-headers",
+var BlobTag = {
+ xmlName: "Tag",
+ serializedName: "BlobTag",
type: {
name: "Composite",
- className: "PageBlobClearPagesHeaders",
+ className: "BlobTag",
modelProperties: {
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- contentMD5: {
- serializedName: "content-md5",
- type: {
- name: "ByteArray"
- }
- },
- xMsContentCrc64: {
- serializedName: "x-ms-content-crc64",
- type: {
- name: "ByteArray"
- }
- },
- blobSequenceNumber: {
- serializedName: "x-ms-blob-sequence-number",
- type: {
- name: "Number"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
+ key: {
+ xmlName: "Key",
+ required: true,
+ serializedName: "Key",
type: {
name: "String"
}
},
- version: {
- serializedName: "x-ms-version",
+ value: {
+ xmlName: "Value",
+ required: true,
+ serializedName: "Value",
type: {
name: "String"
}
- },
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
+ }
+ }
+ }
+};
+var BlobTags = {
+ xmlName: "Tags",
+ serializedName: "BlobTags",
+ type: {
+ name: "Composite",
+ className: "BlobTags",
+ modelProperties: {
+ blobTagSet: {
+ xmlIsWrapped: true,
+ xmlName: "TagSet",
+ xmlElementName: "Tag",
+ required: true,
+ serializedName: "BlobTagSet",
type: {
- name: "String"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "BlobTag"
+ }
+ }
}
}
}
}
};
-var PageBlobUploadPagesFromURLHeaders = {
- serializedName: "pageblob-uploadpagesfromurl-headers",
+var BlobItemInternal = {
+ xmlName: "Blob",
+ serializedName: "BlobItemInternal",
type: {
name: "Composite",
- className: "PageBlobUploadPagesFromURLHeaders",
+ className: "BlobItemInternal",
modelProperties: {
- etag: {
- serializedName: "etag",
+ name: {
+ xmlName: "Name",
+ required: true,
+ serializedName: "Name",
type: {
name: "String"
}
},
- lastModified: {
- serializedName: "last-modified",
+ deleted: {
+ xmlName: "Deleted",
+ required: true,
+ serializedName: "Deleted",
type: {
- name: "DateTimeRfc1123"
+ name: "Boolean"
}
},
- contentMD5: {
- serializedName: "content-md5",
+ snapshot: {
+ xmlName: "Snapshot",
+ required: true,
+ serializedName: "Snapshot",
type: {
- name: "ByteArray"
+ name: "String"
}
},
- xMsContentCrc64: {
- serializedName: "x-ms-content-crc64",
+ versionId: {
+ xmlName: "VersionId",
+ serializedName: "VersionId",
type: {
- name: "ByteArray"
+ name: "String"
}
},
- blobSequenceNumber: {
- serializedName: "x-ms-blob-sequence-number",
+ isCurrentVersion: {
+ xmlName: "IsCurrentVersion",
+ serializedName: "IsCurrentVersion",
type: {
- name: "Number"
+ name: "Boolean"
}
},
- requestId: {
- serializedName: "x-ms-request-id",
+ properties: {
+ xmlName: "Properties",
+ required: true,
+ serializedName: "Properties",
type: {
- name: "String"
+ name: "Composite",
+ className: "BlobPropertiesInternal"
}
},
- version: {
- serializedName: "x-ms-version",
+ metadata: {
+ xmlName: "Metadata",
+ serializedName: "Metadata",
type: {
- name: "String"
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "String"
+ }
+ }
}
},
- date: {
- serializedName: "date",
+ blobTags: {
+ xmlName: "Tags",
+ serializedName: "BlobTags",
type: {
- name: "DateTimeRfc1123"
- }
- },
- isServerEncrypted: {
- serializedName: "x-ms-request-server-encrypted",
- type: {
- name: "Boolean"
- }
- },
- encryptionKeySha256: {
- serializedName: "x-ms-encryption-key-sha256",
- type: {
- name: "String"
- }
- },
- encryptionScope: {
- serializedName: "x-ms-encryption-scope",
- type: {
- name: "String"
+ name: "Composite",
+ className: "BlobTags"
}
},
- errorCode: {
- serializedName: "x-ms-error-code",
+ objectReplicationMetadata: {
+ xmlName: "OrMetadata",
+ serializedName: "ObjectReplicationMetadata",
type: {
- name: "String"
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "String"
+ }
+ }
}
}
}
}
};
-var PageBlobGetPageRangesHeaders = {
- serializedName: "pageblob-getpageranges-headers",
+var BlobFlatListSegment = {
+ xmlName: "Blobs",
+ serializedName: "BlobFlatListSegment",
type: {
name: "Composite",
- className: "PageBlobGetPageRangesHeaders",
+ className: "BlobFlatListSegment",
modelProperties: {
- lastModified: {
- serializedName: "last-modified",
+ blobItems: {
+ xmlName: "BlobItems",
+ xmlElementName: "Blob",
+ required: true,
+ serializedName: "BlobItems",
type: {
- name: "DateTimeRfc1123"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "BlobItemInternal"
+ }
+ }
}
- },
- etag: {
- serializedName: "etag",
+ }
+ }
+ }
+};
+var ListBlobsFlatSegmentResponse = {
+ xmlName: "EnumerationResults",
+ serializedName: "ListBlobsFlatSegmentResponse",
+ type: {
+ name: "Composite",
+ className: "ListBlobsFlatSegmentResponse",
+ modelProperties: {
+ serviceEndpoint: {
+ xmlIsAttribute: true,
+ xmlName: "ServiceEndpoint",
+ required: true,
+ serializedName: "ServiceEndpoint",
type: {
name: "String"
}
},
- blobContentLength: {
- serializedName: "x-ms-blob-content-length",
+ containerName: {
+ xmlIsAttribute: true,
+ xmlName: "ContainerName",
+ required: true,
+ serializedName: "ContainerName",
type: {
- name: "Number"
+ name: "String"
}
},
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
+ prefix: {
+ xmlName: "Prefix",
+ serializedName: "Prefix",
type: {
name: "String"
}
},
- requestId: {
- serializedName: "x-ms-request-id",
+ marker: {
+ xmlName: "Marker",
+ serializedName: "Marker",
type: {
name: "String"
}
},
- version: {
- serializedName: "x-ms-version",
+ maxPageSize: {
+ xmlName: "MaxResults",
+ serializedName: "MaxResults",
type: {
- name: "String"
+ name: "Number"
}
},
- date: {
- serializedName: "date",
+ segment: {
+ xmlName: "Blobs",
+ required: true,
+ serializedName: "Segment",
type: {
- name: "DateTimeRfc1123"
+ name: "Composite",
+ className: "BlobFlatListSegment"
}
},
- errorCode: {
- serializedName: "x-ms-error-code",
+ continuationToken: {
+ xmlName: "NextMarker",
+ serializedName: "NextMarker",
type: {
name: "String"
}
@@ -36637,113 +33415,127 @@ var PageBlobGetPageRangesHeaders = {
}
}
};
-var PageBlobGetPageRangesDiffHeaders = {
- serializedName: "pageblob-getpagerangesdiff-headers",
+var BlobPrefix = {
+ serializedName: "BlobPrefix",
type: {
name: "Composite",
- className: "PageBlobGetPageRangesDiffHeaders",
+ className: "BlobPrefix",
modelProperties: {
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- etag: {
- serializedName: "etag",
- type: {
- name: "String"
- }
- },
- blobContentLength: {
- serializedName: "x-ms-blob-content-length",
- type: {
- name: "Number"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
- }
- },
- requestId: {
- serializedName: "x-ms-request-id",
- type: {
- name: "String"
- }
- },
- version: {
- serializedName: "x-ms-version",
+ name: {
+ xmlName: "Name",
+ required: true,
+ serializedName: "Name",
type: {
name: "String"
}
- },
- date: {
- serializedName: "date",
+ }
+ }
+ }
+};
+var BlobHierarchyListSegment = {
+ xmlName: "Blobs",
+ serializedName: "BlobHierarchyListSegment",
+ type: {
+ name: "Composite",
+ className: "BlobHierarchyListSegment",
+ modelProperties: {
+ blobPrefixes: {
+ xmlName: "BlobPrefixes",
+ xmlElementName: "BlobPrefix",
+ serializedName: "BlobPrefixes",
type: {
- name: "DateTimeRfc1123"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "BlobPrefix"
+ }
+ }
}
},
- errorCode: {
- serializedName: "x-ms-error-code",
+ blobItems: {
+ xmlName: "BlobItems",
+ xmlElementName: "Blob",
+ required: true,
+ serializedName: "BlobItems",
type: {
- name: "String"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "BlobItemInternal"
+ }
+ }
}
}
}
}
};
-var PageBlobResizeHeaders = {
- serializedName: "pageblob-resize-headers",
+var ListBlobsHierarchySegmentResponse = {
+ xmlName: "EnumerationResults",
+ serializedName: "ListBlobsHierarchySegmentResponse",
type: {
name: "Composite",
- className: "PageBlobResizeHeaders",
+ className: "ListBlobsHierarchySegmentResponse",
modelProperties: {
- etag: {
- serializedName: "etag",
+ serviceEndpoint: {
+ xmlIsAttribute: true,
+ xmlName: "ServiceEndpoint",
+ required: true,
+ serializedName: "ServiceEndpoint",
type: {
name: "String"
}
},
- lastModified: {
- serializedName: "last-modified",
+ containerName: {
+ xmlIsAttribute: true,
+ xmlName: "ContainerName",
+ required: true,
+ serializedName: "ContainerName",
type: {
- name: "DateTimeRfc1123"
+ name: "String"
}
},
- blobSequenceNumber: {
- serializedName: "x-ms-blob-sequence-number",
+ prefix: {
+ xmlName: "Prefix",
+ serializedName: "Prefix",
type: {
- name: "Number"
+ name: "String"
}
},
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
+ marker: {
+ xmlName: "Marker",
+ serializedName: "Marker",
type: {
name: "String"
}
},
- requestId: {
- serializedName: "x-ms-request-id",
+ maxPageSize: {
+ xmlName: "MaxResults",
+ serializedName: "MaxResults",
type: {
- name: "String"
+ name: "Number"
}
},
- version: {
- serializedName: "x-ms-version",
+ delimiter: {
+ xmlName: "Delimiter",
+ serializedName: "Delimiter",
type: {
name: "String"
}
},
- date: {
- serializedName: "date",
+ segment: {
+ xmlName: "Blobs",
+ required: true,
+ serializedName: "Segment",
type: {
- name: "DateTimeRfc1123"
+ name: "Composite",
+ className: "BlobHierarchyListSegment"
}
},
- errorCode: {
- serializedName: "x-ms-error-code",
+ continuationToken: {
+ xmlName: "NextMarker",
+ serializedName: "NextMarker",
type: {
name: "String"
}
@@ -36751,218 +33543,341 @@ var PageBlobResizeHeaders = {
}
}
};
-var PageBlobUpdateSequenceNumberHeaders = {
- serializedName: "pageblob-updatesequencenumber-headers",
+var Block = {
+ serializedName: "Block",
type: {
name: "Composite",
- className: "PageBlobUpdateSequenceNumberHeaders",
+ className: "Block",
modelProperties: {
- etag: {
- serializedName: "etag",
+ name: {
+ xmlName: "Name",
+ required: true,
+ serializedName: "Name",
type: {
name: "String"
}
},
- lastModified: {
- serializedName: "last-modified",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- blobSequenceNumber: {
- serializedName: "x-ms-blob-sequence-number",
+ size: {
+ xmlName: "Size",
+ required: true,
+ serializedName: "Size",
type: {
name: "Number"
}
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
+ }
+ }
+ }
+};
+var BlockList = {
+ serializedName: "BlockList",
+ type: {
+ name: "Composite",
+ className: "BlockList",
+ modelProperties: {
+ committedBlocks: {
+ xmlIsWrapped: true,
+ xmlName: "CommittedBlocks",
+ xmlElementName: "Block",
+ serializedName: "CommittedBlocks",
type: {
- name: "String"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "Block"
+ }
+ }
}
},
- requestId: {
- serializedName: "x-ms-request-id",
+ uncommittedBlocks: {
+ xmlIsWrapped: true,
+ xmlName: "UncommittedBlocks",
+ xmlElementName: "Block",
+ serializedName: "UncommittedBlocks",
type: {
- name: "String"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "Block"
+ }
+ }
}
- },
- version: {
- serializedName: "x-ms-version",
+ }
+ }
+ }
+};
+var BlockLookupList = {
+ xmlName: "BlockList",
+ serializedName: "BlockLookupList",
+ type: {
+ name: "Composite",
+ className: "BlockLookupList",
+ modelProperties: {
+ committed: {
+ xmlName: "Committed",
+ xmlElementName: "Committed",
+ serializedName: "Committed",
type: {
- name: "String"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "String"
+ }
+ }
}
},
- date: {
- serializedName: "date",
+ uncommitted: {
+ xmlName: "Uncommitted",
+ xmlElementName: "Uncommitted",
+ serializedName: "Uncommitted",
type: {
- name: "DateTimeRfc1123"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "String"
+ }
+ }
}
},
- errorCode: {
- serializedName: "x-ms-error-code",
+ latest: {
+ xmlName: "Latest",
+ xmlElementName: "Latest",
+ serializedName: "Latest",
type: {
- name: "String"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "String"
+ }
+ }
}
}
}
}
};
-var PageBlobCopyIncrementalHeaders = {
- serializedName: "pageblob-copyincremental-headers",
+var ContainerProperties = {
+ serializedName: "ContainerProperties",
type: {
name: "Composite",
- className: "PageBlobCopyIncrementalHeaders",
+ className: "ContainerProperties",
modelProperties: {
+ lastModified: {
+ xmlName: "Last-Modified",
+ required: true,
+ serializedName: "Last-Modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
etag: {
- serializedName: "etag",
+ xmlName: "Etag",
+ required: true,
+ serializedName: "Etag",
type: {
name: "String"
}
},
- lastModified: {
- serializedName: "last-modified",
+ leaseStatus: {
+ xmlName: "LeaseStatus",
+ serializedName: "LeaseStatus",
type: {
- name: "DateTimeRfc1123"
+ name: "Enum",
+ allowedValues: [
+ "locked",
+ "unlocked"
+ ]
}
},
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
+ leaseState: {
+ xmlName: "LeaseState",
+ serializedName: "LeaseState",
type: {
- name: "String"
+ name: "Enum",
+ allowedValues: [
+ "available",
+ "leased",
+ "expired",
+ "breaking",
+ "broken"
+ ]
}
},
- requestId: {
- serializedName: "x-ms-request-id",
+ leaseDuration: {
+ xmlName: "LeaseDuration",
+ serializedName: "LeaseDuration",
type: {
- name: "String"
+ name: "Enum",
+ allowedValues: [
+ "infinite",
+ "fixed"
+ ]
}
},
- version: {
- serializedName: "x-ms-version",
+ publicAccess: {
+ xmlName: "PublicAccess",
+ serializedName: "PublicAccess",
type: {
name: "String"
}
},
- date: {
- serializedName: "date",
+ hasImmutabilityPolicy: {
+ xmlName: "HasImmutabilityPolicy",
+ serializedName: "HasImmutabilityPolicy",
type: {
- name: "DateTimeRfc1123"
+ name: "Boolean"
}
},
- copyId: {
- serializedName: "x-ms-copy-id",
+ hasLegalHold: {
+ xmlName: "HasLegalHold",
+ serializedName: "HasLegalHold",
+ type: {
+ name: "Boolean"
+ }
+ },
+ defaultEncryptionScope: {
+ xmlName: "DefaultEncryptionScope",
+ serializedName: "DefaultEncryptionScope",
type: {
name: "String"
}
},
- copyStatus: {
- serializedName: "x-ms-copy-status",
+ preventEncryptionScopeOverride: {
+ xmlName: "DenyEncryptionScopeOverride",
+ serializedName: "DenyEncryptionScopeOverride",
type: {
- name: "Enum",
- allowedValues: [
- "pending",
- "success",
- "aborted",
- "failed"
- ]
+ name: "Boolean"
}
},
- errorCode: {
- serializedName: "x-ms-error-code",
+ deletedOn: {
+ xmlName: "DeletedTime",
+ serializedName: "DeletedTime",
type: {
- name: "String"
+ name: "DateTimeRfc1123"
+ }
+ },
+ remainingRetentionDays: {
+ xmlName: "RemainingRetentionDays",
+ serializedName: "RemainingRetentionDays",
+ type: {
+ name: "Number"
}
}
}
}
};
-var AppendBlobAppendBlockHeaders = {
- serializedName: "appendblob-appendblock-headers",
+var ContainerItem = {
+ xmlName: "Container",
+ serializedName: "ContainerItem",
type: {
name: "Composite",
- className: "AppendBlobAppendBlockHeaders",
+ className: "ContainerItem",
modelProperties: {
- etag: {
- serializedName: "etag",
+ name: {
+ xmlName: "Name",
+ required: true,
+ serializedName: "Name",
type: {
name: "String"
}
},
- lastModified: {
- serializedName: "last-modified",
+ deleted: {
+ xmlName: "Deleted",
+ serializedName: "Deleted",
type: {
- name: "DateTimeRfc1123"
+ name: "Boolean"
}
},
- contentMD5: {
- serializedName: "content-md5",
+ version: {
+ xmlName: "Version",
+ serializedName: "Version",
type: {
- name: "ByteArray"
+ name: "String"
}
},
- xMsContentCrc64: {
- serializedName: "x-ms-content-crc64",
+ properties: {
+ xmlName: "Properties",
+ required: true,
+ serializedName: "Properties",
type: {
- name: "ByteArray"
- }
- },
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
+ name: "Composite",
+ className: "ContainerProperties"
}
},
- requestId: {
- serializedName: "x-ms-request-id",
+ metadata: {
+ xmlName: "Metadata",
+ serializedName: "Metadata",
type: {
- name: "String"
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "String"
+ }
+ }
}
- },
- version: {
- serializedName: "x-ms-version",
+ }
+ }
+ }
+};
+var DelimitedTextConfiguration = {
+ serializedName: "DelimitedTextConfiguration",
+ type: {
+ name: "Composite",
+ className: "DelimitedTextConfiguration",
+ modelProperties: {
+ columnSeparator: {
+ xmlName: "ColumnSeparator",
+ required: true,
+ serializedName: "ColumnSeparator",
type: {
name: "String"
}
},
- date: {
- serializedName: "date",
- type: {
- name: "DateTimeRfc1123"
- }
- },
- blobAppendOffset: {
- serializedName: "x-ms-blob-append-offset",
+ fieldQuote: {
+ xmlName: "FieldQuote",
+ required: true,
+ serializedName: "FieldQuote",
type: {
name: "String"
}
},
- blobCommittedBlockCount: {
- serializedName: "x-ms-blob-committed-block-count",
- type: {
- name: "Number"
- }
- },
- isServerEncrypted: {
- serializedName: "x-ms-request-server-encrypted",
+ recordSeparator: {
+ xmlName: "RecordSeparator",
+ required: true,
+ serializedName: "RecordSeparator",
type: {
- name: "Boolean"
+ name: "String"
}
},
- encryptionKeySha256: {
- serializedName: "x-ms-encryption-key-sha256",
+ escapeChar: {
+ xmlName: "EscapeChar",
+ required: true,
+ serializedName: "EscapeChar",
type: {
name: "String"
}
},
- encryptionScope: {
- serializedName: "x-ms-encryption-scope",
+ headersPresent: {
+ xmlName: "HasHeaders",
+ required: true,
+ serializedName: "HeadersPresent",
type: {
- name: "String"
+ name: "Boolean"
}
- },
- errorCode: {
- serializedName: "x-ms-error-code",
+ }
+ }
+ }
+};
+var JsonTextConfiguration = {
+ serializedName: "JsonTextConfiguration",
+ type: {
+ name: "Composite",
+ className: "JsonTextConfiguration",
+ modelProperties: {
+ recordSeparator: {
+ xmlName: "RecordSeparator",
+ required: true,
+ serializedName: "RecordSeparator",
type: {
name: "String"
}
@@ -36970,86 +33885,148 @@ var AppendBlobAppendBlockHeaders = {
}
}
};
-var AppendBlobAppendBlockFromUrlHeaders = {
- serializedName: "appendblob-appendblockfromurl-headers",
+var ListContainersSegmentResponse = {
+ xmlName: "EnumerationResults",
+ serializedName: "ListContainersSegmentResponse",
type: {
name: "Composite",
- className: "AppendBlobAppendBlockFromUrlHeaders",
+ className: "ListContainersSegmentResponse",
modelProperties: {
- etag: {
- serializedName: "etag",
+ serviceEndpoint: {
+ xmlIsAttribute: true,
+ xmlName: "ServiceEndpoint",
+ required: true,
+ serializedName: "ServiceEndpoint",
type: {
name: "String"
}
},
- lastModified: {
- serializedName: "last-modified",
+ prefix: {
+ xmlName: "Prefix",
+ serializedName: "Prefix",
type: {
- name: "DateTimeRfc1123"
+ name: "String"
}
},
- contentMD5: {
- serializedName: "content-md5",
+ marker: {
+ xmlName: "Marker",
+ serializedName: "Marker",
type: {
- name: "ByteArray"
+ name: "String"
}
},
- xMsContentCrc64: {
- serializedName: "x-ms-content-crc64",
+ maxPageSize: {
+ xmlName: "MaxResults",
+ serializedName: "MaxResults",
type: {
- name: "ByteArray"
+ name: "Number"
}
},
- requestId: {
- serializedName: "x-ms-request-id",
+ containerItems: {
+ xmlIsWrapped: true,
+ xmlName: "Containers",
+ xmlElementName: "Container",
+ required: true,
+ serializedName: "ContainerItems",
type: {
- name: "String"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "ContainerItem"
+ }
+ }
}
},
- version: {
- serializedName: "x-ms-version",
+ continuationToken: {
+ xmlName: "NextMarker",
+ serializedName: "NextMarker",
type: {
name: "String"
}
- },
- date: {
- serializedName: "date",
+ }
+ }
+ }
+};
+var CorsRule = {
+ serializedName: "CorsRule",
+ type: {
+ name: "Composite",
+ className: "CorsRule",
+ modelProperties: {
+ allowedOrigins: {
+ xmlName: "AllowedOrigins",
+ required: true,
+ serializedName: "AllowedOrigins",
type: {
- name: "DateTimeRfc1123"
+ name: "String"
}
},
- blobAppendOffset: {
- serializedName: "x-ms-blob-append-offset",
+ allowedMethods: {
+ xmlName: "AllowedMethods",
+ required: true,
+ serializedName: "AllowedMethods",
type: {
name: "String"
}
},
- blobCommittedBlockCount: {
- serializedName: "x-ms-blob-committed-block-count",
+ allowedHeaders: {
+ xmlName: "AllowedHeaders",
+ required: true,
+ serializedName: "AllowedHeaders",
type: {
- name: "Number"
+ name: "String"
}
},
- encryptionKeySha256: {
- serializedName: "x-ms-encryption-key-sha256",
+ exposedHeaders: {
+ xmlName: "ExposedHeaders",
+ required: true,
+ serializedName: "ExposedHeaders",
type: {
name: "String"
}
},
- encryptionScope: {
- serializedName: "x-ms-encryption-scope",
+ maxAgeInSeconds: {
+ xmlName: "MaxAgeInSeconds",
+ required: true,
+ serializedName: "MaxAgeInSeconds",
+ constraints: {
+ InclusiveMinimum: 0
+ },
+ type: {
+ name: "Number"
+ }
+ }
+ }
+ }
+};
+var FilterBlobItem = {
+ xmlName: "Blob",
+ serializedName: "FilterBlobItem",
+ type: {
+ name: "Composite",
+ className: "FilterBlobItem",
+ modelProperties: {
+ name: {
+ xmlName: "Name",
+ required: true,
+ serializedName: "Name",
type: {
name: "String"
}
},
- isServerEncrypted: {
- serializedName: "x-ms-request-server-encrypted",
+ containerName: {
+ xmlName: "ContainerName",
+ required: true,
+ serializedName: "ContainerName",
type: {
- name: "Boolean"
+ name: "String"
}
},
- errorCode: {
- serializedName: "x-ms-error-code",
+ tagValue: {
+ xmlName: "TagValue",
+ required: true,
+ serializedName: "TagValue",
type: {
name: "String"
}
@@ -37057,233 +34034,541 @@ var AppendBlobAppendBlockFromUrlHeaders = {
}
}
};
-var AppendBlobSealHeaders = {
- serializedName: "appendblob-seal-headers",
+var FilterBlobSegment = {
+ xmlName: "EnumerationResults",
+ serializedName: "FilterBlobSegment",
type: {
name: "Composite",
- className: "AppendBlobSealHeaders",
+ className: "FilterBlobSegment",
modelProperties: {
- etag: {
- serializedName: "etag",
+ serviceEndpoint: {
+ xmlIsAttribute: true,
+ xmlName: "ServiceEndpoint",
+ required: true,
+ serializedName: "ServiceEndpoint",
type: {
name: "String"
}
},
- lastModified: {
- serializedName: "last-modified",
+ where: {
+ xmlName: "Where",
+ required: true,
+ serializedName: "Where",
type: {
- name: "DateTimeRfc1123"
+ name: "String"
}
},
- clientRequestId: {
- serializedName: "x-ms-client-request-id",
+ blobs: {
+ xmlIsWrapped: true,
+ xmlName: "Blobs",
+ xmlElementName: "Blob",
+ required: true,
+ serializedName: "Blobs",
type: {
- name: "String"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "FilterBlobItem"
+ }
+ }
}
},
- requestId: {
- serializedName: "x-ms-request-id",
+ continuationToken: {
+ xmlName: "NextMarker",
+ serializedName: "NextMarker",
type: {
name: "String"
}
- },
- version: {
- serializedName: "x-ms-version",
+ }
+ }
+ }
+};
+var GeoReplication = {
+ serializedName: "GeoReplication",
+ type: {
+ name: "Composite",
+ className: "GeoReplication",
+ modelProperties: {
+ status: {
+ xmlName: "Status",
+ required: true,
+ serializedName: "Status",
type: {
name: "String"
}
},
- date: {
- serializedName: "date",
+ lastSyncOn: {
+ xmlName: "LastSyncTime",
+ required: true,
+ serializedName: "LastSyncTime",
type: {
name: "DateTimeRfc1123"
}
- },
- isSealed: {
- serializedName: "x-ms-blob-sealed",
- type: {
- name: "Boolean"
- }
- },
- errorCode: {
- serializedName: "x-ms-error-code",
- type: {
- name: "String"
- }
}
}
}
};
-var BlobQueryHeaders = {
- serializedName: "blob-query-headers",
+var RetentionPolicy = {
+ serializedName: "RetentionPolicy",
type: {
name: "Composite",
- className: "BlobQueryHeaders",
+ className: "RetentionPolicy",
modelProperties: {
- lastModified: {
- serializedName: "last-modified",
+ enabled: {
+ xmlName: "Enabled",
+ required: true,
+ serializedName: "Enabled",
type: {
- name: "DateTimeRfc1123"
+ name: "Boolean"
}
},
- metadata: {
- serializedName: "x-ms-meta",
- type: {
- name: "Dictionary",
- value: {
- type: {
- name: "String"
- }
- }
+ days: {
+ xmlName: "Days",
+ serializedName: "Days",
+ constraints: {
+ InclusiveMinimum: 1
},
- headerCollectionPrefix: "x-ms-meta-"
- },
- contentLength: {
- serializedName: "content-length",
type: {
name: "Number"
}
- },
- contentType: {
- serializedName: "content-type",
+ }
+ }
+ }
+};
+var Logging = {
+ serializedName: "Logging",
+ type: {
+ name: "Composite",
+ className: "Logging",
+ modelProperties: {
+ version: {
+ xmlName: "Version",
+ required: true,
+ serializedName: "Version",
type: {
name: "String"
}
},
- contentRange: {
- serializedName: "content-range",
+ deleteProperty: {
+ xmlName: "Delete",
+ required: true,
+ serializedName: "Delete",
type: {
- name: "String"
+ name: "Boolean"
}
},
- etag: {
- serializedName: "etag",
+ read: {
+ xmlName: "Read",
+ required: true,
+ serializedName: "Read",
type: {
- name: "String"
+ name: "Boolean"
}
},
- contentMD5: {
- serializedName: "content-md5",
+ write: {
+ xmlName: "Write",
+ required: true,
+ serializedName: "Write",
type: {
- name: "ByteArray"
+ name: "Boolean"
}
},
- contentEncoding: {
- serializedName: "content-encoding",
+ retentionPolicy: {
+ xmlName: "RetentionPolicy",
+ required: true,
+ serializedName: "RetentionPolicy",
type: {
- name: "String"
+ name: "Composite",
+ className: "RetentionPolicy"
}
- },
- cacheControl: {
- serializedName: "cache-control",
+ }
+ }
+ }
+};
+var Metrics = {
+ serializedName: "Metrics",
+ type: {
+ name: "Composite",
+ className: "Metrics",
+ modelProperties: {
+ version: {
+ xmlName: "Version",
+ serializedName: "Version",
type: {
name: "String"
}
},
- contentDisposition: {
- serializedName: "content-disposition",
+ enabled: {
+ xmlName: "Enabled",
+ required: true,
+ serializedName: "Enabled",
type: {
- name: "String"
+ name: "Boolean"
}
},
- contentLanguage: {
- serializedName: "content-language",
+ includeAPIs: {
+ xmlName: "IncludeAPIs",
+ serializedName: "IncludeAPIs",
type: {
- name: "String"
+ name: "Boolean"
}
},
- blobSequenceNumber: {
- serializedName: "x-ms-blob-sequence-number",
+ retentionPolicy: {
+ xmlName: "RetentionPolicy",
+ serializedName: "RetentionPolicy",
type: {
- name: "Number"
+ name: "Composite",
+ className: "RetentionPolicy"
}
- },
- blobType: {
- serializedName: "x-ms-blob-type",
+ }
+ }
+ }
+};
+var PageRange = {
+ serializedName: "PageRange",
+ type: {
+ name: "Composite",
+ className: "PageRange",
+ modelProperties: {
+ start: {
+ xmlName: "Start",
+ required: true,
+ serializedName: "Start",
type: {
- name: "Enum",
- allowedValues: [
- "BlockBlob",
- "PageBlob",
- "AppendBlob"
- ]
+ name: "Number"
}
},
- copyCompletionTime: {
- serializedName: "x-ms-copy-completion-time",
+ end: {
+ xmlName: "End",
+ required: true,
+ serializedName: "End",
type: {
- name: "DateTimeRfc1123"
+ name: "Number"
}
- },
- copyStatusDescription: {
- serializedName: "x-ms-copy-status-description",
+ }
+ }
+ }
+};
+var ClearRange = {
+ serializedName: "ClearRange",
+ type: {
+ name: "Composite",
+ className: "ClearRange",
+ modelProperties: {
+ start: {
+ xmlName: "Start",
+ required: true,
+ serializedName: "Start",
type: {
- name: "String"
+ name: "Number"
}
},
- copyId: {
- serializedName: "x-ms-copy-id",
+ end: {
+ xmlName: "End",
+ required: true,
+ serializedName: "End",
type: {
- name: "String"
+ name: "Number"
}
- },
- copyProgress: {
- serializedName: "x-ms-copy-progress",
+ }
+ }
+ }
+};
+var PageList = {
+ serializedName: "PageList",
+ type: {
+ name: "Composite",
+ className: "PageList",
+ modelProperties: {
+ pageRange: {
+ xmlName: "PageRange",
+ xmlElementName: "PageRange",
+ serializedName: "PageRange",
type: {
- name: "String"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "PageRange"
+ }
+ }
}
},
- copySource: {
- serializedName: "x-ms-copy-source",
+ clearRange: {
+ xmlName: "ClearRange",
+ xmlElementName: "ClearRange",
+ serializedName: "ClearRange",
type: {
- name: "String"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "ClearRange"
+ }
+ }
}
- },
- copyStatus: {
- serializedName: "x-ms-copy-status",
+ }
+ }
+ }
+};
+var QueryFormat = {
+ serializedName: "QueryFormat",
+ type: {
+ name: "Composite",
+ className: "QueryFormat",
+ modelProperties: {
+ type: {
+ xmlName: "Type",
+ serializedName: "Type",
type: {
name: "Enum",
allowedValues: [
- "pending",
- "success",
- "aborted",
- "failed"
+ "delimited",
+ "json"
]
}
},
- leaseDuration: {
- serializedName: "x-ms-lease-duration",
+ delimitedTextConfiguration: {
+ xmlName: "DelimitedTextConfiguration",
+ serializedName: "DelimitedTextConfiguration",
type: {
- name: "Enum",
- allowedValues: [
- "infinite",
- "fixed"
- ]
+ name: "Composite",
+ className: "DelimitedTextConfiguration"
}
},
- leaseState: {
- serializedName: "x-ms-lease-state",
+ jsonTextConfiguration: {
+ xmlName: "JsonTextConfiguration",
+ serializedName: "JsonTextConfiguration",
type: {
- name: "Enum",
- allowedValues: [
- "available",
- "leased",
- "expired",
- "breaking",
- "broken"
- ]
+ name: "Composite",
+ className: "JsonTextConfiguration"
+ }
+ }
+ }
+ }
+};
+var QuerySerialization = {
+ serializedName: "QuerySerialization",
+ type: {
+ name: "Composite",
+ className: "QuerySerialization",
+ modelProperties: {
+ format: {
+ xmlName: "Format",
+ required: true,
+ serializedName: "Format",
+ type: {
+ name: "Composite",
+ className: "QueryFormat"
+ }
+ }
+ }
+ }
+};
+var QueryRequest = {
+ serializedName: "QueryRequest",
+ type: {
+ name: "Composite",
+ className: "QueryRequest",
+ modelProperties: {
+ queryType: {
+ xmlName: "QueryType",
+ required: true,
+ isConstant: true,
+ serializedName: "QueryType",
+ defaultValue: 'SQL',
+ type: {
+ name: "String"
}
},
- leaseStatus: {
- serializedName: "x-ms-lease-status",
+ expression: {
+ xmlName: "Expression",
+ required: true,
+ serializedName: "Expression",
type: {
- name: "Enum",
- allowedValues: [
- "locked",
- "unlocked"
- ]
+ name: "String"
+ }
+ },
+ inputSerialization: {
+ xmlName: "InputSerialization",
+ serializedName: "InputSerialization",
+ type: {
+ name: "Composite",
+ className: "QuerySerialization"
+ }
+ },
+ outputSerialization: {
+ xmlName: "OutputSerialization",
+ serializedName: "OutputSerialization",
+ type: {
+ name: "Composite",
+ className: "QuerySerialization"
+ }
+ }
+ }
+ }
+};
+var SignedIdentifier = {
+ serializedName: "SignedIdentifier",
+ type: {
+ name: "Composite",
+ className: "SignedIdentifier",
+ modelProperties: {
+ id: {
+ xmlName: "Id",
+ required: true,
+ serializedName: "Id",
+ type: {
+ name: "String"
+ }
+ },
+ accessPolicy: {
+ xmlName: "AccessPolicy",
+ required: true,
+ serializedName: "AccessPolicy",
+ type: {
+ name: "Composite",
+ className: "AccessPolicy"
+ }
+ }
+ }
+ }
+};
+var StaticWebsite = {
+ serializedName: "StaticWebsite",
+ type: {
+ name: "Composite",
+ className: "StaticWebsite",
+ modelProperties: {
+ enabled: {
+ xmlName: "Enabled",
+ required: true,
+ serializedName: "Enabled",
+ type: {
+ name: "Boolean"
+ }
+ },
+ indexDocument: {
+ xmlName: "IndexDocument",
+ serializedName: "IndexDocument",
+ type: {
+ name: "String"
+ }
+ },
+ errorDocument404Path: {
+ xmlName: "ErrorDocument404Path",
+ serializedName: "ErrorDocument404Path",
+ type: {
+ name: "String"
+ }
+ },
+ defaultIndexDocumentPath: {
+ xmlName: "DefaultIndexDocumentPath",
+ serializedName: "DefaultIndexDocumentPath",
+ type: {
+ name: "String"
+ }
+ }
+ }
+ }
+};
+var BlobServiceProperties = {
+ xmlName: "StorageServiceProperties",
+ serializedName: "BlobServiceProperties",
+ type: {
+ name: "Composite",
+ className: "BlobServiceProperties",
+ modelProperties: {
+ blobAnalyticsLogging: {
+ xmlName: "Logging",
+ serializedName: "Logging",
+ type: {
+ name: "Composite",
+ className: "Logging"
+ }
+ },
+ hourMetrics: {
+ xmlName: "HourMetrics",
+ serializedName: "HourMetrics",
+ type: {
+ name: "Composite",
+ className: "Metrics"
+ }
+ },
+ minuteMetrics: {
+ xmlName: "MinuteMetrics",
+ serializedName: "MinuteMetrics",
+ type: {
+ name: "Composite",
+ className: "Metrics"
+ }
+ },
+ cors: {
+ xmlIsWrapped: true,
+ xmlName: "Cors",
+ xmlElementName: "CorsRule",
+ serializedName: "Cors",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "CorsRule"
+ }
+ }
+ }
+ },
+ defaultServiceVersion: {
+ xmlName: "DefaultServiceVersion",
+ serializedName: "DefaultServiceVersion",
+ type: {
+ name: "String"
+ }
+ },
+ deleteRetentionPolicy: {
+ xmlName: "DeleteRetentionPolicy",
+ serializedName: "DeleteRetentionPolicy",
+ type: {
+ name: "Composite",
+ className: "RetentionPolicy"
}
},
+ staticWebsite: {
+ xmlName: "StaticWebsite",
+ serializedName: "StaticWebsite",
+ type: {
+ name: "Composite",
+ className: "StaticWebsite"
+ }
+ }
+ }
+ }
+};
+var BlobServiceStatistics = {
+ xmlName: "StorageServiceStats",
+ serializedName: "BlobServiceStatistics",
+ type: {
+ name: "Composite",
+ className: "BlobServiceStatistics",
+ modelProperties: {
+ geoReplication: {
+ xmlName: "GeoReplication",
+ serializedName: "GeoReplication",
+ type: {
+ name: "Composite",
+ className: "GeoReplication"
+ }
+ }
+ }
+ }
+};
+var ServiceSetPropertiesHeaders = {
+ serializedName: "service-setproperties-headers",
+ type: {
+ name: "Composite",
+ className: "ServiceSetPropertiesHeaders",
+ modelProperties: {
clientRequestId: {
serializedName: "x-ms-client-request-id",
type: {
@@ -37302,8 +34587,68 @@ var BlobQueryHeaders = {
name: "String"
}
},
- acceptRanges: {
- serializedName: "accept-ranges",
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
+ }
+ }
+};
+var ServiceGetPropertiesHeaders = {
+ serializedName: "service-getproperties-headers",
+ type: {
+ name: "Composite",
+ className: "ServiceGetPropertiesHeaders",
+ modelProperties: {
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
+ }
+ }
+};
+var ServiceGetStatisticsHeaders = {
+ serializedName: "service-getstatistics-headers",
+ type: {
+ name: "Composite",
+ className: "ServiceGetStatisticsHeaders",
+ modelProperties: {
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
type: {
name: "String"
}
@@ -37314,40 +34659,76 @@ var BlobQueryHeaders = {
name: "DateTimeRfc1123"
}
},
- blobCommittedBlockCount: {
- serializedName: "x-ms-blob-committed-block-count",
+ errorCode: {
+ serializedName: "x-ms-error-code",
type: {
- name: "Number"
+ name: "String"
+ }
+ }
+ }
+ }
+};
+var ServiceListContainersSegmentHeaders = {
+ serializedName: "service-listcontainerssegment-headers",
+ type: {
+ name: "Composite",
+ className: "ServiceListContainersSegmentHeaders",
+ modelProperties: {
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
}
},
- isServerEncrypted: {
- serializedName: "x-ms-server-encrypted",
+ requestId: {
+ serializedName: "x-ms-request-id",
type: {
- name: "Boolean"
+ name: "String"
}
},
- encryptionKeySha256: {
- serializedName: "x-ms-encryption-key-sha256",
+ version: {
+ serializedName: "x-ms-version",
type: {
name: "String"
}
},
- encryptionScope: {
- serializedName: "x-ms-encryption-scope",
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
+ }
+ }
+};
+var ServiceGetUserDelegationKeyHeaders = {
+ serializedName: "service-getuserdelegationkey-headers",
+ type: {
+ name: "Composite",
+ className: "ServiceGetUserDelegationKeyHeaders",
+ modelProperties: {
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
type: {
name: "String"
}
},
- blobContentMD5: {
- serializedName: "x-ms-blob-content-md5",
+ requestId: {
+ serializedName: "x-ms-request-id",
type: {
- name: "ByteArray"
+ name: "String"
}
},
- contentCrc64: {
- serializedName: "x-ms-content-crc64",
+ version: {
+ serializedName: "x-ms-version",
type: {
- name: "ByteArray"
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
}
},
errorCode: {
@@ -37359,11 +34740,11 @@ var BlobQueryHeaders = {
}
}
};
-var BlobGetTagsHeaders = {
- serializedName: "blob-gettags-headers",
+var ServiceGetAccountInfoHeaders = {
+ serializedName: "service-getaccountinfo-headers",
type: {
name: "Composite",
- className: "BlobGetTagsHeaders",
+ className: "ServiceGetAccountInfoHeaders",
modelProperties: {
clientRequestId: {
serializedName: "x-ms-client-request-id",
@@ -37389,6 +34770,32 @@ var BlobGetTagsHeaders = {
name: "DateTimeRfc1123"
}
},
+ skuName: {
+ serializedName: "x-ms-sku-name",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "Standard_LRS",
+ "Standard_GRS",
+ "Standard_RAGRS",
+ "Standard_ZRS",
+ "Premium_LRS"
+ ]
+ }
+ },
+ accountKind: {
+ serializedName: "x-ms-account-kind",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "Storage",
+ "BlobStorage",
+ "StorageV2",
+ "FileStorage",
+ "BlockBlobStorage"
+ ]
+ }
+ },
errorCode: {
serializedName: "x-ms-error-code",
type: {
@@ -37398,11 +34805,50 @@ var BlobGetTagsHeaders = {
}
}
};
-var BlobSetTagsHeaders = {
- serializedName: "blob-settags-headers",
+var ServiceSubmitBatchHeaders = {
+ serializedName: "service-submitbatch-headers",
type: {
name: "Composite",
- className: "BlobSetTagsHeaders",
+ className: "ServiceSubmitBatchHeaders",
+ modelProperties: {
+ contentType: {
+ serializedName: "content-type",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
+ }
+ }
+};
+var ServiceFilterBlobsHeaders = {
+ serializedName: "service-filterblobs-headers",
+ type: {
+ name: "Composite",
+ className: "ServiceFilterBlobsHeaders",
modelProperties: {
clientRequestId: {
serializedName: "x-ms-client-request-id",
@@ -37437,4210 +34883,6815 @@ var BlobSetTagsHeaders = {
}
}
};
-
-/*
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for license information.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
- */
-
-var Mappers = /*#__PURE__*/Object.freeze({
- __proto__: null,
- BlobServiceProperties: BlobServiceProperties,
- BlobServiceStatistics: BlobServiceStatistics,
- ContainerItem: ContainerItem,
- ContainerProperties: ContainerProperties,
- CorsRule: CorsRule,
- FilterBlobItem: FilterBlobItem,
- FilterBlobSegment: FilterBlobSegment,
- GeoReplication: GeoReplication,
- KeyInfo: KeyInfo,
- ListContainersSegmentResponse: ListContainersSegmentResponse,
- Logging: Logging,
- Metrics: Metrics,
- RetentionPolicy: RetentionPolicy,
- ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders,
- ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders,
- ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders,
- ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders,
- ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders,
- ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders,
- ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders,
- ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders,
- StaticWebsite: StaticWebsite,
- StorageError: StorageError,
- UserDelegationKey: UserDelegationKey
-});
-
-/*
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for
- * license information.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is
- * regenerated.
- */
-var access = {
- parameterPath: [
- "options",
- "access"
- ],
- mapper: {
- serializedName: "x-ms-blob-public-access",
- type: {
- name: "String"
- }
- }
-};
-var action0 = {
- parameterPath: "action",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "x-ms-lease-action",
- defaultValue: 'acquire',
- type: {
- name: "String"
+var ContainerCreateHeaders = {
+ serializedName: "container-create-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerCreateHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var action1 = {
- parameterPath: "action",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "x-ms-lease-action",
- defaultValue: 'release',
- type: {
- name: "String"
+var ContainerGetPropertiesHeaders = {
+ serializedName: "container-getproperties-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerGetPropertiesHeaders",
+ modelProperties: {
+ metadata: {
+ serializedName: "x-ms-meta",
+ type: {
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "String"
+ }
+ }
+ },
+ headerCollectionPrefix: "x-ms-meta-"
+ },
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ leaseDuration: {
+ serializedName: "x-ms-lease-duration",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "infinite",
+ "fixed"
+ ]
+ }
+ },
+ leaseState: {
+ serializedName: "x-ms-lease-state",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "available",
+ "leased",
+ "expired",
+ "breaking",
+ "broken"
+ ]
+ }
+ },
+ leaseStatus: {
+ serializedName: "x-ms-lease-status",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "locked",
+ "unlocked"
+ ]
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ blobPublicAccess: {
+ serializedName: "x-ms-blob-public-access",
+ type: {
+ name: "String"
+ }
+ },
+ hasImmutabilityPolicy: {
+ serializedName: "x-ms-has-immutability-policy",
+ type: {
+ name: "Boolean"
+ }
+ },
+ hasLegalHold: {
+ serializedName: "x-ms-has-legal-hold",
+ type: {
+ name: "Boolean"
+ }
+ },
+ defaultEncryptionScope: {
+ serializedName: "x-ms-default-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ denyEncryptionScopeOverride: {
+ serializedName: "x-ms-deny-encryption-scope-override",
+ type: {
+ name: "Boolean"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var action2 = {
- parameterPath: "action",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "x-ms-lease-action",
- defaultValue: 'renew',
- type: {
- name: "String"
+var ContainerDeleteHeaders = {
+ serializedName: "container-delete-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerDeleteHeaders",
+ modelProperties: {
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var action3 = {
- parameterPath: "action",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "x-ms-lease-action",
- defaultValue: 'break',
- type: {
- name: "String"
+var ContainerSetMetadataHeaders = {
+ serializedName: "container-setmetadata-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerSetMetadataHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var action4 = {
- parameterPath: "action",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "x-ms-lease-action",
- defaultValue: 'change',
- type: {
- name: "String"
+var ContainerGetAccessPolicyHeaders = {
+ serializedName: "container-getaccesspolicy-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerGetAccessPolicyHeaders",
+ modelProperties: {
+ blobPublicAccess: {
+ serializedName: "x-ms-blob-public-access",
+ type: {
+ name: "String"
+ }
+ },
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var action5 = {
- parameterPath: "action",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "action",
- defaultValue: 'setAccessControl',
- type: {
- name: "String"
+var ContainerSetAccessPolicyHeaders = {
+ serializedName: "container-setaccesspolicy-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerSetAccessPolicyHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var action6 = {
- parameterPath: "action",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "action",
- defaultValue: 'getAccessControl',
- type: {
- name: "String"
+var ContainerRestoreHeaders = {
+ serializedName: "container-restore-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerRestoreHeaders",
+ modelProperties: {
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var appendPosition = {
- parameterPath: [
- "options",
- "appendPositionAccessConditions",
- "appendPosition"
- ],
- mapper: {
- serializedName: "x-ms-blob-condition-appendpos",
- type: {
- name: "Number"
- }
- }
-};
-var blobCacheControl = {
- parameterPath: [
- "options",
- "blobHTTPHeaders",
- "blobCacheControl"
- ],
- mapper: {
- serializedName: "x-ms-blob-cache-control",
- type: {
- name: "String"
- }
- }
-};
-var blobContentDisposition = {
- parameterPath: [
- "options",
- "blobHTTPHeaders",
- "blobContentDisposition"
- ],
- mapper: {
- serializedName: "x-ms-blob-content-disposition",
- type: {
- name: "String"
+var ContainerAcquireLeaseHeaders = {
+ serializedName: "container-acquirelease-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerAcquireLeaseHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ leaseId: {
+ serializedName: "x-ms-lease-id",
+ type: {
+ name: "String"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var blobContentEncoding = {
- parameterPath: [
- "options",
- "blobHTTPHeaders",
- "blobContentEncoding"
- ],
- mapper: {
- serializedName: "x-ms-blob-content-encoding",
- type: {
- name: "String"
+var ContainerReleaseLeaseHeaders = {
+ serializedName: "container-releaselease-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerReleaseLeaseHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var blobContentLanguage = {
- parameterPath: [
- "options",
- "blobHTTPHeaders",
- "blobContentLanguage"
- ],
- mapper: {
- serializedName: "x-ms-blob-content-language",
- type: {
- name: "String"
+var ContainerRenewLeaseHeaders = {
+ serializedName: "container-renewlease-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerRenewLeaseHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ leaseId: {
+ serializedName: "x-ms-lease-id",
+ type: {
+ name: "String"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var blobContentLength = {
- parameterPath: "blobContentLength",
- mapper: {
- required: true,
- serializedName: "x-ms-blob-content-length",
- type: {
- name: "Number"
+var ContainerBreakLeaseHeaders = {
+ serializedName: "container-breaklease-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerBreakLeaseHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ leaseTime: {
+ serializedName: "x-ms-lease-time",
+ type: {
+ name: "Number"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var blobContentMD5 = {
- parameterPath: [
- "options",
- "blobHTTPHeaders",
- "blobContentMD5"
- ],
- mapper: {
- serializedName: "x-ms-blob-content-md5",
- type: {
- name: "ByteArray"
+var ContainerChangeLeaseHeaders = {
+ serializedName: "container-changelease-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerChangeLeaseHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ leaseId: {
+ serializedName: "x-ms-lease-id",
+ type: {
+ name: "String"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var blobContentType = {
- parameterPath: [
- "options",
- "blobHTTPHeaders",
- "blobContentType"
- ],
- mapper: {
- serializedName: "x-ms-blob-content-type",
- type: {
- name: "String"
+var ContainerListBlobFlatSegmentHeaders = {
+ serializedName: "container-listblobflatsegment-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerListBlobFlatSegmentHeaders",
+ modelProperties: {
+ contentType: {
+ serializedName: "content-type",
+ type: {
+ name: "String"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var blobSequenceNumber = {
- parameterPath: [
- "options",
- "blobSequenceNumber"
- ],
- mapper: {
- serializedName: "x-ms-blob-sequence-number",
- defaultValue: 0,
- type: {
- name: "Number"
+var ContainerListBlobHierarchySegmentHeaders = {
+ serializedName: "container-listblobhierarchysegment-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerListBlobHierarchySegmentHeaders",
+ modelProperties: {
+ contentType: {
+ serializedName: "content-type",
+ type: {
+ name: "String"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var blobTagsString = {
- parameterPath: [
- "options",
- "blobTagsString"
- ],
- mapper: {
- serializedName: "x-ms-tags",
- type: {
- name: "String"
+var ContainerGetAccountInfoHeaders = {
+ serializedName: "container-getaccountinfo-headers",
+ type: {
+ name: "Composite",
+ className: "ContainerGetAccountInfoHeaders",
+ modelProperties: {
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ skuName: {
+ serializedName: "x-ms-sku-name",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "Standard_LRS",
+ "Standard_GRS",
+ "Standard_RAGRS",
+ "Standard_ZRS",
+ "Premium_LRS"
+ ]
+ }
+ },
+ accountKind: {
+ serializedName: "x-ms-account-kind",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "Storage",
+ "BlobStorage",
+ "StorageV2",
+ "FileStorage",
+ "BlockBlobStorage"
+ ]
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var blobType0 = {
- parameterPath: "blobType",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "x-ms-blob-type",
- defaultValue: 'PageBlob',
- type: {
- name: "String"
- }
- }
-};
-var blobType1 = {
- parameterPath: "blobType",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "x-ms-blob-type",
- defaultValue: 'AppendBlob',
- type: {
- name: "String"
- }
- }
-};
-var blobType2 = {
- parameterPath: "blobType",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "x-ms-blob-type",
- defaultValue: 'BlockBlob',
- type: {
- name: "String"
- }
- }
-};
-var blockId = {
- parameterPath: "blockId",
- mapper: {
- required: true,
- serializedName: "blockid",
- type: {
- name: "String"
- }
- }
-};
-var breakPeriod = {
- parameterPath: [
- "options",
- "breakPeriod"
- ],
- mapper: {
- serializedName: "x-ms-lease-break-period",
- type: {
- name: "Number"
- }
- }
-};
-var cacheControl = {
- parameterPath: [
- "options",
- "directoryHttpHeaders",
- "cacheControl"
- ],
- mapper: {
- serializedName: "x-ms-cache-control",
- type: {
- name: "String"
- }
- }
-};
-var comp0 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'properties',
- type: {
- name: "String"
- }
- }
-};
-var comp1 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'stats',
- type: {
- name: "String"
- }
- }
-};
-var comp10 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'expiry',
- type: {
- name: "String"
- }
- }
-};
-var comp11 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'snapshot',
- type: {
- name: "String"
- }
- }
-};
-var comp12 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'copy',
- type: {
- name: "String"
- }
- }
-};
-var comp13 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'tier',
- type: {
- name: "String"
- }
- }
-};
-var comp14 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'query',
- type: {
- name: "String"
- }
- }
-};
-var comp15 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'tags',
- type: {
- name: "String"
- }
- }
-};
-var comp16 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'page',
- type: {
- name: "String"
- }
- }
-};
-var comp17 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'pagelist',
- type: {
- name: "String"
- }
- }
-};
-var comp18 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'incrementalcopy',
- type: {
- name: "String"
- }
- }
-};
-var comp19 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'appendblock',
- type: {
- name: "String"
- }
- }
-};
-var comp2 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'list',
- type: {
- name: "String"
- }
- }
-};
-var comp20 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'seal',
- type: {
- name: "String"
+var BlobDownloadHeaders = {
+ serializedName: "blob-download-headers",
+ type: {
+ name: "Composite",
+ className: "BlobDownloadHeaders",
+ modelProperties: {
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ metadata: {
+ serializedName: "x-ms-meta",
+ type: {
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "String"
+ }
+ }
+ },
+ headerCollectionPrefix: "x-ms-meta-"
+ },
+ objectReplicationPolicyId: {
+ serializedName: "x-ms-or-policy-id",
+ type: {
+ name: "String"
+ }
+ },
+ objectReplicationRules: {
+ serializedName: "x-ms-or",
+ type: {
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "String"
+ }
+ }
+ },
+ headerCollectionPrefix: "x-ms-or-"
+ },
+ contentLength: {
+ serializedName: "content-length",
+ type: {
+ name: "Number"
+ }
+ },
+ contentType: {
+ serializedName: "content-type",
+ type: {
+ name: "String"
+ }
+ },
+ contentRange: {
+ serializedName: "content-range",
+ type: {
+ name: "String"
+ }
+ },
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ contentEncoding: {
+ serializedName: "content-encoding",
+ type: {
+ name: "String"
+ }
+ },
+ cacheControl: {
+ serializedName: "cache-control",
+ type: {
+ name: "String"
+ }
+ },
+ contentDisposition: {
+ serializedName: "content-disposition",
+ type: {
+ name: "String"
+ }
+ },
+ contentLanguage: {
+ serializedName: "content-language",
+ type: {
+ name: "String"
+ }
+ },
+ blobSequenceNumber: {
+ serializedName: "x-ms-blob-sequence-number",
+ type: {
+ name: "Number"
+ }
+ },
+ blobType: {
+ serializedName: "x-ms-blob-type",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "BlockBlob",
+ "PageBlob",
+ "AppendBlob"
+ ]
+ }
+ },
+ copyCompletedOn: {
+ serializedName: "x-ms-copy-completion-time",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ copyStatusDescription: {
+ serializedName: "x-ms-copy-status-description",
+ type: {
+ name: "String"
+ }
+ },
+ copyId: {
+ serializedName: "x-ms-copy-id",
+ type: {
+ name: "String"
+ }
+ },
+ copyProgress: {
+ serializedName: "x-ms-copy-progress",
+ type: {
+ name: "String"
+ }
+ },
+ copySource: {
+ serializedName: "x-ms-copy-source",
+ type: {
+ name: "String"
+ }
+ },
+ copyStatus: {
+ serializedName: "x-ms-copy-status",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "pending",
+ "success",
+ "aborted",
+ "failed"
+ ]
+ }
+ },
+ leaseDuration: {
+ serializedName: "x-ms-lease-duration",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "infinite",
+ "fixed"
+ ]
+ }
+ },
+ leaseState: {
+ serializedName: "x-ms-lease-state",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "available",
+ "leased",
+ "expired",
+ "breaking",
+ "broken"
+ ]
+ }
+ },
+ leaseStatus: {
+ serializedName: "x-ms-lease-status",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "locked",
+ "unlocked"
+ ]
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ versionId: {
+ serializedName: "x-ms-version-id",
+ type: {
+ name: "String"
+ }
+ },
+ acceptRanges: {
+ serializedName: "accept-ranges",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ blobCommittedBlockCount: {
+ serializedName: "x-ms-blob-committed-block-count",
+ type: {
+ name: "Number"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ encryptionKeySha256: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ },
+ encryptionScope: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ blobContentMD5: {
+ serializedName: "x-ms-blob-content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ tagCount: {
+ serializedName: "x-ms-tag-count",
+ type: {
+ name: "Number"
+ }
+ },
+ isSealed: {
+ serializedName: "x-ms-blob-sealed",
+ type: {
+ name: "Boolean"
+ }
+ },
+ contentCrc64: {
+ serializedName: "x-ms-content-crc64",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var comp21 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'block',
- type: {
- name: "String"
- }
- }
-};
-var comp22 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'blocklist',
- type: {
- name: "String"
+var BlobGetPropertiesHeaders = {
+ serializedName: "blob-getproperties-headers",
+ type: {
+ name: "Composite",
+ className: "BlobGetPropertiesHeaders",
+ modelProperties: {
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ createdOn: {
+ serializedName: "x-ms-creation-time",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ metadata: {
+ serializedName: "x-ms-meta",
+ type: {
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "String"
+ }
+ }
+ },
+ headerCollectionPrefix: "x-ms-meta-"
+ },
+ objectReplicationPolicyId: {
+ serializedName: "x-ms-or-policy-id",
+ type: {
+ name: "String"
+ }
+ },
+ objectReplicationRules: {
+ serializedName: "x-ms-or",
+ type: {
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "String"
+ }
+ }
+ },
+ headerCollectionPrefix: "x-ms-or-"
+ },
+ blobType: {
+ serializedName: "x-ms-blob-type",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "BlockBlob",
+ "PageBlob",
+ "AppendBlob"
+ ]
+ }
+ },
+ copyCompletedOn: {
+ serializedName: "x-ms-copy-completion-time",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ copyStatusDescription: {
+ serializedName: "x-ms-copy-status-description",
+ type: {
+ name: "String"
+ }
+ },
+ copyId: {
+ serializedName: "x-ms-copy-id",
+ type: {
+ name: "String"
+ }
+ },
+ copyProgress: {
+ serializedName: "x-ms-copy-progress",
+ type: {
+ name: "String"
+ }
+ },
+ copySource: {
+ serializedName: "x-ms-copy-source",
+ type: {
+ name: "String"
+ }
+ },
+ copyStatus: {
+ serializedName: "x-ms-copy-status",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "pending",
+ "success",
+ "aborted",
+ "failed"
+ ]
+ }
+ },
+ isIncrementalCopy: {
+ serializedName: "x-ms-incremental-copy",
+ type: {
+ name: "Boolean"
+ }
+ },
+ destinationSnapshot: {
+ serializedName: "x-ms-copy-destination-snapshot",
+ type: {
+ name: "String"
+ }
+ },
+ leaseDuration: {
+ serializedName: "x-ms-lease-duration",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "infinite",
+ "fixed"
+ ]
+ }
+ },
+ leaseState: {
+ serializedName: "x-ms-lease-state",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "available",
+ "leased",
+ "expired",
+ "breaking",
+ "broken"
+ ]
+ }
+ },
+ leaseStatus: {
+ serializedName: "x-ms-lease-status",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "locked",
+ "unlocked"
+ ]
+ }
+ },
+ contentLength: {
+ serializedName: "content-length",
+ type: {
+ name: "Number"
+ }
+ },
+ contentType: {
+ serializedName: "content-type",
+ type: {
+ name: "String"
+ }
+ },
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ contentEncoding: {
+ serializedName: "content-encoding",
+ type: {
+ name: "String"
+ }
+ },
+ contentDisposition: {
+ serializedName: "content-disposition",
+ type: {
+ name: "String"
+ }
+ },
+ contentLanguage: {
+ serializedName: "content-language",
+ type: {
+ name: "String"
+ }
+ },
+ cacheControl: {
+ serializedName: "cache-control",
+ type: {
+ name: "String"
+ }
+ },
+ blobSequenceNumber: {
+ serializedName: "x-ms-blob-sequence-number",
+ type: {
+ name: "Number"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ acceptRanges: {
+ serializedName: "accept-ranges",
+ type: {
+ name: "String"
+ }
+ },
+ blobCommittedBlockCount: {
+ serializedName: "x-ms-blob-committed-block-count",
+ type: {
+ name: "Number"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ encryptionKeySha256: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ },
+ encryptionScope: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ accessTier: {
+ serializedName: "x-ms-access-tier",
+ type: {
+ name: "String"
+ }
+ },
+ accessTierInferred: {
+ serializedName: "x-ms-access-tier-inferred",
+ type: {
+ name: "Boolean"
+ }
+ },
+ archiveStatus: {
+ serializedName: "x-ms-archive-status",
+ type: {
+ name: "String"
+ }
+ },
+ accessTierChangedOn: {
+ serializedName: "x-ms-access-tier-change-time",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ versionId: {
+ serializedName: "x-ms-version-id",
+ type: {
+ name: "String"
+ }
+ },
+ isCurrentVersion: {
+ serializedName: "x-ms-is-current-version",
+ type: {
+ name: "Boolean"
+ }
+ },
+ tagCount: {
+ serializedName: "x-ms-tag-count",
+ type: {
+ name: "Number"
+ }
+ },
+ expiresOn: {
+ serializedName: "x-ms-expiry-time",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ isSealed: {
+ serializedName: "x-ms-blob-sealed",
+ type: {
+ name: "Boolean"
+ }
+ },
+ rehydratePriority: {
+ serializedName: "x-ms-rehydrate-priority",
+ type: {
+ name: "String"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var comp3 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'userdelegationkey',
- type: {
- name: "String"
+var BlobDeleteHeaders = {
+ serializedName: "blob-delete-headers",
+ type: {
+ name: "Composite",
+ className: "BlobDeleteHeaders",
+ modelProperties: {
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var comp4 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'batch',
- type: {
- name: "String"
+var BlobSetAccessControlHeaders = {
+ serializedName: "blob-setaccesscontrol-headers",
+ type: {
+ name: "Composite",
+ className: "BlobSetAccessControlHeaders",
+ modelProperties: {
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var comp5 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'blobs',
- type: {
- name: "String"
+var BlobGetAccessControlHeaders = {
+ serializedName: "blob-getaccesscontrol-headers",
+ type: {
+ name: "Composite",
+ className: "BlobGetAccessControlHeaders",
+ modelProperties: {
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ xMsOwner: {
+ serializedName: "x-ms-owner",
+ type: {
+ name: "String"
+ }
+ },
+ xMsGroup: {
+ serializedName: "x-ms-group",
+ type: {
+ name: "String"
+ }
+ },
+ xMsPermissions: {
+ serializedName: "x-ms-permissions",
+ type: {
+ name: "String"
+ }
+ },
+ xMsAcl: {
+ serializedName: "x-ms-acl",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var comp6 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'metadata',
- type: {
- name: "String"
- }
- }
-};
-var comp7 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'acl',
- type: {
- name: "String"
- }
- }
-};
-var comp8 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'undelete',
- type: {
- name: "String"
+var BlobRenameHeaders = {
+ serializedName: "blob-rename-headers",
+ type: {
+ name: "Composite",
+ className: "BlobRenameHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ contentLength: {
+ serializedName: "content-length",
+ type: {
+ name: "Number"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ }
}
}
};
-var comp9 = {
- parameterPath: "comp",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "comp",
- defaultValue: 'lease',
- type: {
- name: "String"
+var PageBlobCreateHeaders = {
+ serializedName: "pageblob-create-headers",
+ type: {
+ name: "Composite",
+ className: "PageBlobCreateHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ versionId: {
+ serializedName: "x-ms-version-id",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-request-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ encryptionKeySha256: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ },
+ encryptionScope: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var contentDisposition = {
- parameterPath: [
- "options",
- "directoryHttpHeaders",
- "contentDisposition"
- ],
- mapper: {
- serializedName: "x-ms-content-disposition",
- type: {
- name: "String"
+var AppendBlobCreateHeaders = {
+ serializedName: "appendblob-create-headers",
+ type: {
+ name: "Composite",
+ className: "AppendBlobCreateHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ versionId: {
+ serializedName: "x-ms-version-id",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-request-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ encryptionKeySha256: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ },
+ encryptionScope: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var contentEncoding = {
- parameterPath: [
- "options",
- "directoryHttpHeaders",
- "contentEncoding"
- ],
- mapper: {
- serializedName: "x-ms-content-encoding",
- type: {
- name: "String"
+var BlockBlobUploadHeaders = {
+ serializedName: "blockblob-upload-headers",
+ type: {
+ name: "Composite",
+ className: "BlockBlobUploadHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ versionId: {
+ serializedName: "x-ms-version-id",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-request-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ encryptionKeySha256: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ },
+ encryptionScope: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var contentLanguage = {
- parameterPath: [
- "options",
- "directoryHttpHeaders",
- "contentLanguage"
- ],
- mapper: {
- serializedName: "x-ms-content-language",
- type: {
- name: "String"
+var BlobUndeleteHeaders = {
+ serializedName: "blob-undelete-headers",
+ type: {
+ name: "Composite",
+ className: "BlobUndeleteHeaders",
+ modelProperties: {
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var contentLength = {
- parameterPath: "contentLength",
- mapper: {
- required: true,
- serializedName: "Content-Length",
- type: {
- name: "Number"
+var BlobSetExpiryHeaders = {
+ serializedName: "blob-setexpiry-headers",
+ type: {
+ name: "Composite",
+ className: "BlobSetExpiryHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var contentType = {
- parameterPath: [
- "options",
- "directoryHttpHeaders",
- "contentType"
- ],
- mapper: {
- serializedName: "x-ms-content-type",
- type: {
- name: "String"
+var BlobSetHTTPHeadersHeaders = {
+ serializedName: "blob-sethttpheaders-headers",
+ type: {
+ name: "Composite",
+ className: "BlobSetHTTPHeadersHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ blobSequenceNumber: {
+ serializedName: "x-ms-blob-sequence-number",
+ type: {
+ name: "Number"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var copyActionAbortConstant = {
- parameterPath: "copyActionAbortConstant",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "x-ms-copy-action",
- defaultValue: 'abort',
- type: {
- name: "String"
- }
- }
-};
-var copyId = {
- parameterPath: "copyId",
- mapper: {
- required: true,
- serializedName: "copyid",
- type: {
- name: "String"
- }
- }
-};
-var copySource = {
- parameterPath: "copySource",
- mapper: {
- required: true,
- serializedName: "x-ms-copy-source",
- type: {
- name: "String"
- }
- }
-};
-var defaultEncryptionScope = {
- parameterPath: [
- "options",
- "containerEncryptionScope",
- "defaultEncryptionScope"
- ],
- mapper: {
- serializedName: "x-ms-default-encryption-scope",
- type: {
- name: "String"
- }
- }
-};
-var deletedContainerName = {
- parameterPath: [
- "options",
- "deletedContainerName"
- ],
- mapper: {
- serializedName: "x-ms-deleted-container-name",
- type: {
- name: "String"
+var BlobSetMetadataHeaders = {
+ serializedName: "blob-setmetadata-headers",
+ type: {
+ name: "Composite",
+ className: "BlobSetMetadataHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ versionId: {
+ serializedName: "x-ms-version-id",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-request-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ encryptionKeySha256: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ },
+ encryptionScope: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var deletedContainerVersion = {
- parameterPath: [
- "options",
- "deletedContainerVersion"
- ],
- mapper: {
- serializedName: "x-ms-deleted-container-version",
- type: {
- name: "String"
+var BlobAcquireLeaseHeaders = {
+ serializedName: "blob-acquirelease-headers",
+ type: {
+ name: "Composite",
+ className: "BlobAcquireLeaseHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ leaseId: {
+ serializedName: "x-ms-lease-id",
+ type: {
+ name: "String"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var deleteSnapshots = {
- parameterPath: [
- "options",
- "deleteSnapshots"
- ],
- mapper: {
- serializedName: "x-ms-delete-snapshots",
- type: {
- name: "Enum",
- allowedValues: [
- "include",
- "only"
- ]
+var BlobReleaseLeaseHeaders = {
+ serializedName: "blob-releaselease-headers",
+ type: {
+ name: "Composite",
+ className: "BlobReleaseLeaseHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var delimiter = {
- parameterPath: "delimiter",
- mapper: {
- required: true,
- serializedName: "delimiter",
- type: {
- name: "String"
+var BlobRenewLeaseHeaders = {
+ serializedName: "blob-renewlease-headers",
+ type: {
+ name: "Composite",
+ className: "BlobRenewLeaseHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ leaseId: {
+ serializedName: "x-ms-lease-id",
+ type: {
+ name: "String"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var directoryProperties = {
- parameterPath: [
- "options",
- "directoryProperties"
- ],
- mapper: {
- serializedName: "x-ms-properties",
- type: {
- name: "String"
+var BlobChangeLeaseHeaders = {
+ serializedName: "blob-changelease-headers",
+ type: {
+ name: "Composite",
+ className: "BlobChangeLeaseHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ leaseId: {
+ serializedName: "x-ms-lease-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var duration = {
- parameterPath: [
- "options",
- "duration"
- ],
- mapper: {
- serializedName: "x-ms-lease-duration",
- type: {
- name: "Number"
+var BlobBreakLeaseHeaders = {
+ serializedName: "blob-breaklease-headers",
+ type: {
+ name: "Composite",
+ className: "BlobBreakLeaseHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ leaseTime: {
+ serializedName: "x-ms-lease-time",
+ type: {
+ name: "Number"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var encryptionAlgorithm = {
- parameterPath: [
- "options",
- "cpkInfo",
- "encryptionAlgorithm"
- ],
- mapper: {
- serializedName: "x-ms-encryption-algorithm",
- type: {
- name: "Enum",
- allowedValues: [
- "AES256"
- ]
+var BlobCreateSnapshotHeaders = {
+ serializedName: "blob-createsnapshot-headers",
+ type: {
+ name: "Composite",
+ className: "BlobCreateSnapshotHeaders",
+ modelProperties: {
+ snapshot: {
+ serializedName: "x-ms-snapshot",
+ type: {
+ name: "String"
+ }
+ },
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ versionId: {
+ serializedName: "x-ms-version-id",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-request-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var encryptionKey = {
- parameterPath: [
- "options",
- "cpkInfo",
- "encryptionKey"
- ],
- mapper: {
- serializedName: "x-ms-encryption-key",
- type: {
- name: "String"
- }
- }
-};
-var encryptionKeySha256 = {
- parameterPath: [
- "options",
- "cpkInfo",
- "encryptionKeySha256"
- ],
- mapper: {
- serializedName: "x-ms-encryption-key-sha256",
- type: {
- name: "String"
- }
- }
-};
-var encryptionScope = {
- parameterPath: [
- "options",
- "encryptionScope"
- ],
- mapper: {
- serializedName: "x-ms-encryption-scope",
- type: {
- name: "String"
- }
- }
-};
-var expiresOn = {
- parameterPath: [
- "options",
- "expiresOn"
- ],
- mapper: {
- serializedName: "x-ms-expiry-time",
- type: {
- name: "String"
- }
- }
-};
-var expiryOptions = {
- parameterPath: "expiryOptions",
- mapper: {
- required: true,
- serializedName: "x-ms-expiry-option",
- type: {
- name: "String"
- }
- }
-};
-var group = {
- parameterPath: [
- "options",
- "group"
- ],
- mapper: {
- serializedName: "x-ms-group",
- type: {
- name: "String"
- }
- }
-};
-var ifMatch = {
- parameterPath: [
- "options",
- "modifiedAccessConditions",
- "ifMatch"
- ],
- mapper: {
- serializedName: "If-Match",
- type: {
- name: "String"
- }
- }
-};
-var ifModifiedSince = {
- parameterPath: [
- "options",
- "modifiedAccessConditions",
- "ifModifiedSince"
- ],
- mapper: {
- serializedName: "If-Modified-Since",
- type: {
- name: "DateTimeRfc1123"
- }
- }
-};
-var ifNoneMatch = {
- parameterPath: [
- "options",
- "modifiedAccessConditions",
- "ifNoneMatch"
- ],
- mapper: {
- serializedName: "If-None-Match",
- type: {
- name: "String"
- }
- }
-};
-var ifSequenceNumberEqualTo = {
- parameterPath: [
- "options",
- "sequenceNumberAccessConditions",
- "ifSequenceNumberEqualTo"
- ],
- mapper: {
- serializedName: "x-ms-if-sequence-number-eq",
- type: {
- name: "Number"
- }
- }
-};
-var ifSequenceNumberLessThan = {
- parameterPath: [
- "options",
- "sequenceNumberAccessConditions",
- "ifSequenceNumberLessThan"
- ],
- mapper: {
- serializedName: "x-ms-if-sequence-number-lt",
- type: {
- name: "Number"
- }
- }
-};
-var ifSequenceNumberLessThanOrEqualTo = {
- parameterPath: [
- "options",
- "sequenceNumberAccessConditions",
- "ifSequenceNumberLessThanOrEqualTo"
- ],
- mapper: {
- serializedName: "x-ms-if-sequence-number-le",
- type: {
- name: "Number"
- }
- }
-};
-var ifTags = {
- parameterPath: [
- "options",
- "modifiedAccessConditions",
- "ifTags"
- ],
- mapper: {
- serializedName: "x-ms-if-tags",
- type: {
- name: "String"
- }
- }
-};
-var ifUnmodifiedSince = {
- parameterPath: [
- "options",
- "modifiedAccessConditions",
- "ifUnmodifiedSince"
- ],
- mapper: {
- serializedName: "If-Unmodified-Since",
- type: {
- name: "DateTimeRfc1123"
- }
- }
-};
-var include0 = {
- parameterPath: [
- "options",
- "include"
- ],
- mapper: {
- serializedName: "include",
- type: {
- name: "Sequence",
- element: {
+var BlobStartCopyFromURLHeaders = {
+ serializedName: "blob-startcopyfromurl-headers",
+ type: {
+ name: "Composite",
+ className: "BlobStartCopyFromURLHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ versionId: {
+ serializedName: "x-ms-version-id",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ copyId: {
+ serializedName: "x-ms-copy-id",
+ type: {
+ name: "String"
+ }
+ },
+ copyStatus: {
+ serializedName: "x-ms-copy-status",
type: {
name: "Enum",
allowedValues: [
- "metadata",
- "deleted"
+ "pending",
+ "success",
+ "aborted",
+ "failed"
]
}
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
}
}
- },
- collectionFormat: coreHttp.QueryCollectionFormat.Csv
+ }
};
-var include1 = {
- parameterPath: [
- "options",
- "include"
- ],
- mapper: {
- serializedName: "include",
- type: {
- name: "Sequence",
- element: {
+var BlobCopyFromURLHeaders = {
+ serializedName: "blob-copyfromurl-headers",
+ type: {
+ name: "Composite",
+ className: "BlobCopyFromURLHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ versionId: {
+ serializedName: "x-ms-version-id",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ copyId: {
+ serializedName: "x-ms-copy-id",
+ type: {
+ name: "String"
+ }
+ },
+ copyStatus: {
+ serializedName: "x-ms-copy-status",
type: {
name: "Enum",
allowedValues: [
- "copy",
- "deleted",
- "metadata",
- "snapshots",
- "uncommittedblobs",
- "versions",
- "tags"
+ "success"
]
}
+ },
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ xMsContentCrc64: {
+ serializedName: "x-ms-content-crc64",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
}
}
- },
- collectionFormat: coreHttp.QueryCollectionFormat.Csv
-};
-var leaseId0 = {
- parameterPath: [
- "options",
- "leaseAccessConditions",
- "leaseId"
- ],
- mapper: {
- serializedName: "x-ms-lease-id",
- type: {
- name: "String"
- }
- }
-};
-var leaseId1 = {
- parameterPath: "leaseId",
- mapper: {
- required: true,
- serializedName: "x-ms-lease-id",
- type: {
- name: "String"
- }
- }
-};
-var listType = {
- parameterPath: "listType",
- mapper: {
- required: true,
- serializedName: "blocklisttype",
- defaultValue: 'committed',
- type: {
- name: "Enum",
- allowedValues: [
- "committed",
- "uncommitted",
- "all"
- ]
- }
- }
-};
-var marker0 = {
- parameterPath: [
- "options",
- "marker"
- ],
- mapper: {
- serializedName: "marker",
- type: {
- name: "String"
- }
- }
-};
-var maxPageSize = {
- parameterPath: [
- "options",
- "maxPageSize"
- ],
- mapper: {
- serializedName: "maxresults",
- constraints: {
- InclusiveMinimum: 1
- },
- type: {
- name: "Number"
- }
}
};
-var maxSize = {
- parameterPath: [
- "options",
- "appendPositionAccessConditions",
- "maxSize"
- ],
- mapper: {
- serializedName: "x-ms-blob-condition-maxsize",
- type: {
- name: "Number"
+var BlobAbortCopyFromURLHeaders = {
+ serializedName: "blob-abortcopyfromurl-headers",
+ type: {
+ name: "Composite",
+ className: "BlobAbortCopyFromURLHeaders",
+ modelProperties: {
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var metadata = {
- parameterPath: [
- "options",
- "metadata"
- ],
- mapper: {
- serializedName: "x-ms-meta",
- type: {
- name: "Dictionary",
- value: {
+var BlobSetTierHeaders = {
+ serializedName: "blob-settier-headers",
+ type: {
+ name: "Composite",
+ className: "BlobSetTierHeaders",
+ modelProperties: {
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
type: {
name: "String"
}
}
- },
- headerCollectionPrefix: "x-ms-meta-"
- }
-};
-var multipartContentType = {
- parameterPath: "multipartContentType",
- mapper: {
- required: true,
- serializedName: "Content-Type",
- type: {
- name: "String"
}
}
};
-var owner = {
- parameterPath: [
- "options",
- "owner"
- ],
- mapper: {
- serializedName: "x-ms-owner",
- type: {
- name: "String"
- }
- }
-};
-var pageWrite0 = {
- parameterPath: "pageWrite",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "x-ms-page-write",
- defaultValue: 'update',
- type: {
- name: "String"
- }
- }
-};
-var pageWrite1 = {
- parameterPath: "pageWrite",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "x-ms-page-write",
- defaultValue: 'clear',
- type: {
- name: "String"
- }
- }
-};
-var pathRenameMode = {
- parameterPath: [
- "options",
- "pathRenameMode"
- ],
- mapper: {
- serializedName: "mode",
- type: {
- name: "Enum",
- allowedValues: [
- "legacy",
- "posix"
- ]
+var BlobGetAccountInfoHeaders = {
+ serializedName: "blob-getaccountinfo-headers",
+ type: {
+ name: "Composite",
+ className: "BlobGetAccountInfoHeaders",
+ modelProperties: {
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ skuName: {
+ serializedName: "x-ms-sku-name",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "Standard_LRS",
+ "Standard_GRS",
+ "Standard_RAGRS",
+ "Standard_ZRS",
+ "Premium_LRS"
+ ]
+ }
+ },
+ accountKind: {
+ serializedName: "x-ms-account-kind",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "Storage",
+ "BlobStorage",
+ "StorageV2",
+ "FileStorage",
+ "BlockBlobStorage"
+ ]
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var posixAcl = {
- parameterPath: [
- "options",
- "posixAcl"
- ],
- mapper: {
- serializedName: "x-ms-acl",
- type: {
- name: "String"
+var BlockBlobStageBlockHeaders = {
+ serializedName: "blockblob-stageblock-headers",
+ type: {
+ name: "Composite",
+ className: "BlockBlobStageBlockHeaders",
+ modelProperties: {
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ xMsContentCrc64: {
+ serializedName: "x-ms-content-crc64",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-request-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ encryptionKeySha256: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ },
+ encryptionScope: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var posixPermissions = {
- parameterPath: [
- "options",
- "posixPermissions"
- ],
- mapper: {
- serializedName: "x-ms-permissions",
- type: {
- name: "String"
+var BlockBlobStageBlockFromURLHeaders = {
+ serializedName: "blockblob-stageblockfromurl-headers",
+ type: {
+ name: "Composite",
+ className: "BlockBlobStageBlockFromURLHeaders",
+ modelProperties: {
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ xMsContentCrc64: {
+ serializedName: "x-ms-content-crc64",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-request-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ encryptionKeySha256: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ },
+ encryptionScope: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var posixUmask = {
- parameterPath: [
- "options",
- "posixUmask"
- ],
- mapper: {
- serializedName: "x-ms-umask",
- type: {
- name: "String"
+var BlockBlobCommitBlockListHeaders = {
+ serializedName: "blockblob-commitblocklist-headers",
+ type: {
+ name: "Composite",
+ className: "BlockBlobCommitBlockListHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ xMsContentCrc64: {
+ serializedName: "x-ms-content-crc64",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ versionId: {
+ serializedName: "x-ms-version-id",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-request-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ encryptionKeySha256: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ },
+ encryptionScope: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var prefix = {
- parameterPath: [
- "options",
- "prefix"
- ],
- mapper: {
- serializedName: "prefix",
- type: {
- name: "String"
+var BlockBlobGetBlockListHeaders = {
+ serializedName: "blockblob-getblocklist-headers",
+ type: {
+ name: "Composite",
+ className: "BlockBlobGetBlockListHeaders",
+ modelProperties: {
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ contentType: {
+ serializedName: "content-type",
+ type: {
+ name: "String"
+ }
+ },
+ blobContentLength: {
+ serializedName: "x-ms-blob-content-length",
+ type: {
+ name: "Number"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var preventEncryptionScopeOverride = {
- parameterPath: [
- "options",
- "containerEncryptionScope",
- "preventEncryptionScopeOverride"
- ],
- mapper: {
- serializedName: "x-ms-deny-encryption-scope-override",
- type: {
- name: "Boolean"
+var PageBlobUploadPagesHeaders = {
+ serializedName: "pageblob-uploadpages-headers",
+ type: {
+ name: "Composite",
+ className: "PageBlobUploadPagesHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ xMsContentCrc64: {
+ serializedName: "x-ms-content-crc64",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ blobSequenceNumber: {
+ serializedName: "x-ms-blob-sequence-number",
+ type: {
+ name: "Number"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-request-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ encryptionKeySha256: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ },
+ encryptionScope: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var prevsnapshot = {
- parameterPath: [
- "options",
- "prevsnapshot"
- ],
- mapper: {
- serializedName: "prevsnapshot",
- type: {
- name: "String"
- }
- }
-};
-var prevSnapshotUrl = {
- parameterPath: [
- "options",
- "prevSnapshotUrl"
- ],
- mapper: {
- serializedName: "x-ms-previous-snapshot-url",
- type: {
- name: "String"
- }
- }
-};
-var proposedLeaseId0 = {
- parameterPath: [
- "options",
- "proposedLeaseId"
- ],
- mapper: {
- serializedName: "x-ms-proposed-lease-id",
- type: {
- name: "String"
+var PageBlobClearPagesHeaders = {
+ serializedName: "pageblob-clearpages-headers",
+ type: {
+ name: "Composite",
+ className: "PageBlobClearPagesHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ xMsContentCrc64: {
+ serializedName: "x-ms-content-crc64",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ blobSequenceNumber: {
+ serializedName: "x-ms-blob-sequence-number",
+ type: {
+ name: "Number"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var proposedLeaseId1 = {
- parameterPath: "proposedLeaseId",
- mapper: {
- required: true,
- serializedName: "x-ms-proposed-lease-id",
- type: {
- name: "String"
+var PageBlobUploadPagesFromURLHeaders = {
+ serializedName: "pageblob-uploadpagesfromurl-headers",
+ type: {
+ name: "Composite",
+ className: "PageBlobUploadPagesFromURLHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ xMsContentCrc64: {
+ serializedName: "x-ms-content-crc64",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ blobSequenceNumber: {
+ serializedName: "x-ms-blob-sequence-number",
+ type: {
+ name: "Number"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-request-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ encryptionKeySha256: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ },
+ encryptionScope: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var range0 = {
- parameterPath: [
- "options",
- "range"
- ],
- mapper: {
- serializedName: "x-ms-range",
- type: {
- name: "String"
+var PageBlobGetPageRangesHeaders = {
+ serializedName: "pageblob-getpageranges-headers",
+ type: {
+ name: "Composite",
+ className: "PageBlobGetPageRangesHeaders",
+ modelProperties: {
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ blobContentLength: {
+ serializedName: "x-ms-blob-content-length",
+ type: {
+ name: "Number"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var range1 = {
- parameterPath: "range",
- mapper: {
- required: true,
- serializedName: "x-ms-range",
- type: {
- name: "String"
+var PageBlobGetPageRangesDiffHeaders = {
+ serializedName: "pageblob-getpagerangesdiff-headers",
+ type: {
+ name: "Composite",
+ className: "PageBlobGetPageRangesDiffHeaders",
+ modelProperties: {
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ blobContentLength: {
+ serializedName: "x-ms-blob-content-length",
+ type: {
+ name: "Number"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var rangeGetContentCRC64 = {
- parameterPath: [
- "options",
- "rangeGetContentCRC64"
- ],
- mapper: {
- serializedName: "x-ms-range-get-content-crc64",
- type: {
- name: "Boolean"
+var PageBlobResizeHeaders = {
+ serializedName: "pageblob-resize-headers",
+ type: {
+ name: "Composite",
+ className: "PageBlobResizeHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ blobSequenceNumber: {
+ serializedName: "x-ms-blob-sequence-number",
+ type: {
+ name: "Number"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var rangeGetContentMD5 = {
- parameterPath: [
- "options",
- "rangeGetContentMD5"
- ],
- mapper: {
- serializedName: "x-ms-range-get-content-md5",
- type: {
- name: "Boolean"
+var PageBlobUpdateSequenceNumberHeaders = {
+ serializedName: "pageblob-updatesequencenumber-headers",
+ type: {
+ name: "Composite",
+ className: "PageBlobUpdateSequenceNumberHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ blobSequenceNumber: {
+ serializedName: "x-ms-blob-sequence-number",
+ type: {
+ name: "Number"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var rehydratePriority = {
- parameterPath: [
- "options",
- "rehydratePriority"
- ],
- mapper: {
- serializedName: "x-ms-rehydrate-priority",
- type: {
- name: "String"
+var PageBlobCopyIncrementalHeaders = {
+ serializedName: "pageblob-copyincremental-headers",
+ type: {
+ name: "Composite",
+ className: "PageBlobCopyIncrementalHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ copyId: {
+ serializedName: "x-ms-copy-id",
+ type: {
+ name: "String"
+ }
+ },
+ copyStatus: {
+ serializedName: "x-ms-copy-status",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "pending",
+ "success",
+ "aborted",
+ "failed"
+ ]
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var renameSource = {
- parameterPath: "renameSource",
- mapper: {
- required: true,
- serializedName: "x-ms-rename-source",
- type: {
- name: "String"
- }
- }
-};
-var requestId = {
- parameterPath: [
- "options",
- "requestId"
- ],
- mapper: {
- serializedName: "x-ms-client-request-id",
- type: {
- name: "String"
+var AppendBlobAppendBlockHeaders = {
+ serializedName: "appendblob-appendblock-headers",
+ type: {
+ name: "Composite",
+ className: "AppendBlobAppendBlockHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ xMsContentCrc64: {
+ serializedName: "x-ms-content-crc64",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ blobAppendOffset: {
+ serializedName: "x-ms-blob-append-offset",
+ type: {
+ name: "String"
+ }
+ },
+ blobCommittedBlockCount: {
+ serializedName: "x-ms-blob-committed-block-count",
+ type: {
+ name: "Number"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-request-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ encryptionKeySha256: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ },
+ encryptionScope: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var restype0 = {
- parameterPath: "restype",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "restype",
- defaultValue: 'service',
- type: {
- name: "String"
+var AppendBlobAppendBlockFromUrlHeaders = {
+ serializedName: "appendblob-appendblockfromurl-headers",
+ type: {
+ name: "Composite",
+ className: "AppendBlobAppendBlockFromUrlHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ xMsContentCrc64: {
+ serializedName: "x-ms-content-crc64",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ blobAppendOffset: {
+ serializedName: "x-ms-blob-append-offset",
+ type: {
+ name: "String"
+ }
+ },
+ blobCommittedBlockCount: {
+ serializedName: "x-ms-blob-committed-block-count",
+ type: {
+ name: "Number"
+ }
+ },
+ encryptionKeySha256: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ },
+ encryptionScope: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-request-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var restype1 = {
- parameterPath: "restype",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "restype",
- defaultValue: 'account',
- type: {
- name: "String"
+var AppendBlobSealHeaders = {
+ serializedName: "appendblob-seal-headers",
+ type: {
+ name: "Composite",
+ className: "AppendBlobSealHeaders",
+ modelProperties: {
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ isSealed: {
+ serializedName: "x-ms-blob-sealed",
+ type: {
+ name: "Boolean"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var restype2 = {
- parameterPath: "restype",
- mapper: {
- required: true,
- isConstant: true,
- serializedName: "restype",
- defaultValue: 'container',
- type: {
- name: "String"
+var BlobQueryHeaders = {
+ serializedName: "blob-query-headers",
+ type: {
+ name: "Composite",
+ className: "BlobQueryHeaders",
+ modelProperties: {
+ lastModified: {
+ serializedName: "last-modified",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ metadata: {
+ serializedName: "x-ms-meta",
+ type: {
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "String"
+ }
+ }
+ },
+ headerCollectionPrefix: "x-ms-meta-"
+ },
+ contentLength: {
+ serializedName: "content-length",
+ type: {
+ name: "Number"
+ }
+ },
+ contentType: {
+ serializedName: "content-type",
+ type: {
+ name: "String"
+ }
+ },
+ contentRange: {
+ serializedName: "content-range",
+ type: {
+ name: "String"
+ }
+ },
+ etag: {
+ serializedName: "etag",
+ type: {
+ name: "String"
+ }
+ },
+ contentMD5: {
+ serializedName: "content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ contentEncoding: {
+ serializedName: "content-encoding",
+ type: {
+ name: "String"
+ }
+ },
+ cacheControl: {
+ serializedName: "cache-control",
+ type: {
+ name: "String"
+ }
+ },
+ contentDisposition: {
+ serializedName: "content-disposition",
+ type: {
+ name: "String"
+ }
+ },
+ contentLanguage: {
+ serializedName: "content-language",
+ type: {
+ name: "String"
+ }
+ },
+ blobSequenceNumber: {
+ serializedName: "x-ms-blob-sequence-number",
+ type: {
+ name: "Number"
+ }
+ },
+ blobType: {
+ serializedName: "x-ms-blob-type",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "BlockBlob",
+ "PageBlob",
+ "AppendBlob"
+ ]
+ }
+ },
+ copyCompletionTime: {
+ serializedName: "x-ms-copy-completion-time",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ copyStatusDescription: {
+ serializedName: "x-ms-copy-status-description",
+ type: {
+ name: "String"
+ }
+ },
+ copyId: {
+ serializedName: "x-ms-copy-id",
+ type: {
+ name: "String"
+ }
+ },
+ copyProgress: {
+ serializedName: "x-ms-copy-progress",
+ type: {
+ name: "String"
+ }
+ },
+ copySource: {
+ serializedName: "x-ms-copy-source",
+ type: {
+ name: "String"
+ }
+ },
+ copyStatus: {
+ serializedName: "x-ms-copy-status",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "pending",
+ "success",
+ "aborted",
+ "failed"
+ ]
+ }
+ },
+ leaseDuration: {
+ serializedName: "x-ms-lease-duration",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "infinite",
+ "fixed"
+ ]
+ }
+ },
+ leaseState: {
+ serializedName: "x-ms-lease-state",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "available",
+ "leased",
+ "expired",
+ "breaking",
+ "broken"
+ ]
+ }
+ },
+ leaseStatus: {
+ serializedName: "x-ms-lease-status",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "locked",
+ "unlocked"
+ ]
+ }
+ },
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ acceptRanges: {
+ serializedName: "accept-ranges",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ blobCommittedBlockCount: {
+ serializedName: "x-ms-blob-committed-block-count",
+ type: {
+ name: "Number"
+ }
+ },
+ isServerEncrypted: {
+ serializedName: "x-ms-server-encrypted",
+ type: {
+ name: "Boolean"
+ }
+ },
+ encryptionKeySha256: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ },
+ encryptionScope: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ },
+ blobContentMD5: {
+ serializedName: "x-ms-blob-content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ contentCrc64: {
+ serializedName: "x-ms-content-crc64",
+ type: {
+ name: "ByteArray"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
}
}
};
-var sealBlob = {
- parameterPath: [
- "options",
- "sealBlob"
+var BlobGetTagsHeaders = {
+ serializedName: "blob-gettags-headers",
+ type: {
+ name: "Composite",
+ className: "BlobGetTagsHeaders",
+ modelProperties: {
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
+ }
+ }
+};
+var BlobSetTagsHeaders = {
+ serializedName: "blob-settags-headers",
+ type: {
+ name: "Composite",
+ className: "BlobSetTagsHeaders",
+ modelProperties: {
+ clientRequestId: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ requestId: {
+ serializedName: "x-ms-request-id",
+ type: {
+ name: "String"
+ }
+ },
+ version: {
+ serializedName: "x-ms-version",
+ type: {
+ name: "String"
+ }
+ },
+ date: {
+ serializedName: "date",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ },
+ errorCode: {
+ serializedName: "x-ms-error-code",
+ type: {
+ name: "String"
+ }
+ }
+ }
+ }
+};
+
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for license information.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+
+var Mappers = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ BlobServiceProperties: BlobServiceProperties,
+ BlobServiceStatistics: BlobServiceStatistics,
+ ContainerItem: ContainerItem,
+ ContainerProperties: ContainerProperties,
+ CorsRule: CorsRule,
+ FilterBlobItem: FilterBlobItem,
+ FilterBlobSegment: FilterBlobSegment,
+ GeoReplication: GeoReplication,
+ KeyInfo: KeyInfo,
+ ListContainersSegmentResponse: ListContainersSegmentResponse,
+ Logging: Logging,
+ Metrics: Metrics,
+ RetentionPolicy: RetentionPolicy,
+ ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders,
+ ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders,
+ ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders,
+ ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders,
+ ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders,
+ ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders,
+ ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders,
+ ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders,
+ StaticWebsite: StaticWebsite,
+ StorageError: StorageError,
+ UserDelegationKey: UserDelegationKey
+});
+
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for
+ * license information.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is
+ * regenerated.
+ */
+var access = {
+ parameterPath: [
+ "options",
+ "access"
],
mapper: {
- serializedName: "x-ms-seal-blob",
+ serializedName: "x-ms-blob-public-access",
type: {
- name: "Boolean"
+ name: "String"
}
}
};
-var sequenceNumberAction = {
- parameterPath: "sequenceNumberAction",
+var action0 = {
+ parameterPath: "action",
mapper: {
required: true,
- serializedName: "x-ms-sequence-number-action",
+ isConstant: true,
+ serializedName: "x-ms-lease-action",
+ defaultValue: 'acquire',
type: {
- name: "Enum",
- allowedValues: [
- "max",
- "update",
- "increment"
- ]
+ name: "String"
}
}
};
-var snapshot = {
- parameterPath: [
- "options",
- "snapshot"
- ],
+var action1 = {
+ parameterPath: "action",
mapper: {
- serializedName: "snapshot",
+ required: true,
+ isConstant: true,
+ serializedName: "x-ms-lease-action",
+ defaultValue: 'release',
type: {
name: "String"
}
}
};
-var sourceContentCrc64 = {
- parameterPath: [
- "options",
- "sourceContentCrc64"
- ],
+var action2 = {
+ parameterPath: "action",
mapper: {
- serializedName: "x-ms-source-content-crc64",
+ required: true,
+ isConstant: true,
+ serializedName: "x-ms-lease-action",
+ defaultValue: 'renew',
type: {
- name: "ByteArray"
+ name: "String"
}
}
};
-var sourceContentMD5 = {
+var action3 = {
+ parameterPath: "action",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "x-ms-lease-action",
+ defaultValue: 'break',
+ type: {
+ name: "String"
+ }
+ }
+};
+var action4 = {
+ parameterPath: "action",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "x-ms-lease-action",
+ defaultValue: 'change',
+ type: {
+ name: "String"
+ }
+ }
+};
+var action5 = {
+ parameterPath: "action",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "action",
+ defaultValue: 'setAccessControl',
+ type: {
+ name: "String"
+ }
+ }
+};
+var action6 = {
+ parameterPath: "action",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "action",
+ defaultValue: 'getAccessControl',
+ type: {
+ name: "String"
+ }
+ }
+};
+var appendPosition = {
parameterPath: [
"options",
- "sourceContentMD5"
+ "appendPositionAccessConditions",
+ "appendPosition"
],
mapper: {
- serializedName: "x-ms-source-content-md5",
+ serializedName: "x-ms-blob-condition-appendpos",
type: {
- name: "ByteArray"
+ name: "Number"
}
}
};
-var sourceIfMatch = {
+var blobCacheControl = {
parameterPath: [
"options",
- "sourceModifiedAccessConditions",
- "sourceIfMatch"
+ "blobHTTPHeaders",
+ "blobCacheControl"
],
mapper: {
- serializedName: "x-ms-source-if-match",
+ serializedName: "x-ms-blob-cache-control",
type: {
name: "String"
}
}
};
-var sourceIfModifiedSince = {
+var blobContentDisposition = {
parameterPath: [
"options",
- "sourceModifiedAccessConditions",
- "sourceIfModifiedSince"
+ "blobHTTPHeaders",
+ "blobContentDisposition"
],
mapper: {
- serializedName: "x-ms-source-if-modified-since",
+ serializedName: "x-ms-blob-content-disposition",
type: {
- name: "DateTimeRfc1123"
+ name: "String"
}
}
};
-var sourceIfNoneMatch = {
+var blobContentEncoding = {
parameterPath: [
"options",
- "sourceModifiedAccessConditions",
- "sourceIfNoneMatch"
+ "blobHTTPHeaders",
+ "blobContentEncoding"
],
mapper: {
- serializedName: "x-ms-source-if-none-match",
+ serializedName: "x-ms-blob-content-encoding",
type: {
name: "String"
}
}
};
-var sourceIfTags = {
+var blobContentLanguage = {
parameterPath: [
"options",
- "sourceModifiedAccessConditions",
- "sourceIfTags"
+ "blobHTTPHeaders",
+ "blobContentLanguage"
],
mapper: {
- serializedName: "x-ms-source-if-tags",
+ serializedName: "x-ms-blob-content-language",
type: {
name: "String"
}
}
};
-var sourceIfUnmodifiedSince = {
+var blobContentLength = {
+ parameterPath: "blobContentLength",
+ mapper: {
+ required: true,
+ serializedName: "x-ms-blob-content-length",
+ type: {
+ name: "Number"
+ }
+ }
+};
+var blobContentMD5 = {
parameterPath: [
"options",
- "sourceModifiedAccessConditions",
- "sourceIfUnmodifiedSince"
+ "blobHTTPHeaders",
+ "blobContentMD5"
],
mapper: {
- serializedName: "x-ms-source-if-unmodified-since",
+ serializedName: "x-ms-blob-content-md5",
type: {
- name: "DateTimeRfc1123"
+ name: "ByteArray"
}
}
};
-var sourceLeaseId = {
+var blobContentType = {
parameterPath: [
"options",
- "sourceLeaseId"
+ "blobHTTPHeaders",
+ "blobContentType"
],
mapper: {
- serializedName: "x-ms-source-lease-id",
+ serializedName: "x-ms-blob-content-type",
type: {
name: "String"
}
}
};
-var sourceRange0 = {
- parameterPath: "sourceRange",
+var blobSequenceNumber = {
+ parameterPath: [
+ "options",
+ "blobSequenceNumber"
+ ],
mapper: {
- required: true,
- serializedName: "x-ms-source-range",
+ serializedName: "x-ms-blob-sequence-number",
+ defaultValue: 0,
type: {
- name: "String"
+ name: "Number"
}
}
};
-var sourceRange1 = {
+var blobTagsString = {
parameterPath: [
"options",
- "sourceRange"
+ "blobTagsString"
],
mapper: {
- serializedName: "x-ms-source-range",
+ serializedName: "x-ms-tags",
type: {
name: "String"
}
}
};
-var sourceUrl = {
- parameterPath: "sourceUrl",
+var blobType0 = {
+ parameterPath: "blobType",
mapper: {
required: true,
- serializedName: "x-ms-copy-source",
+ isConstant: true,
+ serializedName: "x-ms-blob-type",
+ defaultValue: 'PageBlob',
type: {
name: "String"
}
}
};
-var tier0 = {
- parameterPath: [
- "options",
- "tier"
- ],
+var blobType1 = {
+ parameterPath: "blobType",
mapper: {
- serializedName: "x-ms-access-tier",
+ required: true,
+ isConstant: true,
+ serializedName: "x-ms-blob-type",
+ defaultValue: 'AppendBlob',
type: {
name: "String"
}
}
};
-var tier1 = {
- parameterPath: "tier",
+var blobType2 = {
+ parameterPath: "blobType",
mapper: {
required: true,
- serializedName: "x-ms-access-tier",
+ isConstant: true,
+ serializedName: "x-ms-blob-type",
+ defaultValue: 'BlockBlob',
type: {
name: "String"
}
}
};
-var timeoutInSeconds = {
+var blockId = {
+ parameterPath: "blockId",
+ mapper: {
+ required: true,
+ serializedName: "blockid",
+ type: {
+ name: "String"
+ }
+ }
+};
+var breakPeriod = {
parameterPath: [
"options",
- "timeoutInSeconds"
+ "breakPeriod"
],
mapper: {
- serializedName: "timeout",
- constraints: {
- InclusiveMinimum: 0
- },
+ serializedName: "x-ms-lease-break-period",
type: {
name: "Number"
}
}
};
-var transactionalContentCrc64 = {
+var cacheControl = {
parameterPath: [
"options",
- "transactionalContentCrc64"
+ "directoryHttpHeaders",
+ "cacheControl"
],
mapper: {
- serializedName: "x-ms-content-crc64",
+ serializedName: "x-ms-cache-control",
type: {
- name: "ByteArray"
+ name: "String"
}
}
};
-var transactionalContentMD5 = {
- parameterPath: [
- "options",
- "transactionalContentMD5"
- ],
+var comp0 = {
+ parameterPath: "comp",
mapper: {
- serializedName: "Content-MD5",
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'properties',
type: {
- name: "ByteArray"
+ name: "String"
}
}
};
-var upn = {
- parameterPath: [
- "options",
- "upn"
- ],
+var comp1 = {
+ parameterPath: "comp",
mapper: {
- serializedName: "upn",
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'stats',
type: {
- name: "Boolean"
+ name: "String"
}
}
};
-var url = {
- parameterPath: "url",
+var comp10 = {
+ parameterPath: "comp",
mapper: {
required: true,
- serializedName: "url",
- defaultValue: '',
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'expiry',
type: {
name: "String"
}
- },
- skipEncoding: true
+ }
};
-var version = {
- parameterPath: "version",
+var comp11 = {
+ parameterPath: "comp",
mapper: {
required: true,
isConstant: true,
- serializedName: "x-ms-version",
- defaultValue: '2019-12-12',
+ serializedName: "comp",
+ defaultValue: 'snapshot',
type: {
name: "String"
}
}
};
-var versionId = {
- parameterPath: [
- "options",
- "versionId"
- ],
+var comp12 = {
+ parameterPath: "comp",
mapper: {
- serializedName: "versionid",
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'copy',
type: {
name: "String"
}
}
};
-var where = {
- parameterPath: [
- "options",
- "where"
- ],
+var comp13 = {
+ parameterPath: "comp",
mapper: {
- serializedName: "where",
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'tier',
type: {
name: "String"
}
}
};
-var xMsRequiresSync = {
- parameterPath: "xMsRequiresSync",
+var comp14 = {
+ parameterPath: "comp",
mapper: {
required: true,
isConstant: true,
- serializedName: "x-ms-requires-sync",
- defaultValue: 'true',
+ serializedName: "comp",
+ defaultValue: 'query',
type: {
name: "String"
}
}
};
-
-/*
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for
- * license information.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is
- * regenerated.
- */
-/** Class representing a Service. */
-var Service = /** @class */ (function () {
- /**
- * Create a Service.
- * @param {StorageClientContext} client Reference to the service client.
- */
- function Service(client) {
- this.client = client;
+var comp15 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'tags',
+ type: {
+ name: "String"
+ }
}
- Service.prototype.setProperties = function (blobServiceProperties, options, callback) {
- return this.client.sendOperationRequest({
- blobServiceProperties: blobServiceProperties,
- options: options
- }, setPropertiesOperationSpec, callback);
- };
- Service.prototype.getProperties = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, getPropertiesOperationSpec, callback);
- };
- Service.prototype.getStatistics = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, getStatisticsOperationSpec, callback);
- };
- Service.prototype.listContainersSegment = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, listContainersSegmentOperationSpec, callback);
- };
- Service.prototype.getUserDelegationKey = function (keyInfo, options, callback) {
- return this.client.sendOperationRequest({
- keyInfo: keyInfo,
- options: options
- }, getUserDelegationKeyOperationSpec, callback);
- };
- Service.prototype.getAccountInfo = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, getAccountInfoOperationSpec, callback);
- };
- Service.prototype.submitBatch = function (body, contentLength, multipartContentType, options, callback) {
- return this.client.sendOperationRequest({
- body: body,
- contentLength: contentLength,
- multipartContentType: multipartContentType,
- options: options
- }, submitBatchOperationSpec, callback);
- };
- Service.prototype.filterBlobs = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, filterBlobsOperationSpec, callback);
- };
- return Service;
-}());
-// Operation Specifications
-var serializer = new coreHttp.Serializer(Mappers, true);
-var setPropertiesOperationSpec = {
- httpMethod: "PUT",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- restype0,
- comp0
- ],
- headerParameters: [
- version,
- requestId
- ],
- requestBody: {
- parameterPath: "blobServiceProperties",
- mapper: tslib.__assign(tslib.__assign({}, BlobServiceProperties), { required: true })
- },
- contentType: "application/xml; charset=utf-8",
- responses: {
- 202: {
- headersMapper: ServiceSetPropertiesHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ServiceSetPropertiesHeaders
+};
+var comp16 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'page',
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer
+ }
};
-var getPropertiesOperationSpec = {
- httpMethod: "GET",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- restype0,
- comp0
- ],
- headerParameters: [
- version,
- requestId
- ],
- responses: {
- 200: {
- bodyMapper: BlobServiceProperties,
- headersMapper: ServiceGetPropertiesHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ServiceGetPropertiesHeaders
+var comp17 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'pagelist',
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer
+ }
};
-var getStatisticsOperationSpec = {
- httpMethod: "GET",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- restype0,
- comp1
- ],
- headerParameters: [
- version,
- requestId
- ],
- responses: {
- 200: {
- bodyMapper: BlobServiceStatistics,
- headersMapper: ServiceGetStatisticsHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ServiceGetStatisticsHeaders
+var comp18 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'incrementalcopy',
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer
+ }
};
-var listContainersSegmentOperationSpec = {
- httpMethod: "GET",
- urlParameters: [
- url
- ],
- queryParameters: [
- prefix,
- marker0,
- maxPageSize,
- include0,
- timeoutInSeconds,
- comp2
- ],
- headerParameters: [
- version,
- requestId
- ],
- responses: {
- 200: {
- bodyMapper: ListContainersSegmentResponse,
- headersMapper: ServiceListContainersSegmentHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ServiceListContainersSegmentHeaders
+var comp19 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'appendblock',
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer
+ }
};
-var getUserDelegationKeyOperationSpec = {
- httpMethod: "POST",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- restype0,
- comp3
- ],
- headerParameters: [
- version,
- requestId
- ],
- requestBody: {
- parameterPath: "keyInfo",
- mapper: tslib.__assign(tslib.__assign({}, KeyInfo), { required: true })
- },
- contentType: "application/xml; charset=utf-8",
- responses: {
- 200: {
- bodyMapper: UserDelegationKey,
- headersMapper: ServiceGetUserDelegationKeyHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ServiceGetUserDelegationKeyHeaders
+var comp2 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'list',
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer
+ }
};
-var getAccountInfoOperationSpec = {
- httpMethod: "GET",
- urlParameters: [
- url
- ],
- queryParameters: [
- restype1,
- comp0
- ],
- headerParameters: [
- version
- ],
- responses: {
- 200: {
- headersMapper: ServiceGetAccountInfoHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ServiceGetAccountInfoHeaders
+var comp20 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'seal',
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer
+ }
};
-var submitBatchOperationSpec = {
- httpMethod: "POST",
- urlParameters: [
- url
+var comp21 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'block',
+ type: {
+ name: "String"
+ }
+ }
+};
+var comp22 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'blocklist',
+ type: {
+ name: "String"
+ }
+ }
+};
+var comp3 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'userdelegationkey',
+ type: {
+ name: "String"
+ }
+ }
+};
+var comp4 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'batch',
+ type: {
+ name: "String"
+ }
+ }
+};
+var comp5 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'blobs',
+ type: {
+ name: "String"
+ }
+ }
+};
+var comp6 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'metadata',
+ type: {
+ name: "String"
+ }
+ }
+};
+var comp7 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'acl',
+ type: {
+ name: "String"
+ }
+ }
+};
+var comp8 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'undelete',
+ type: {
+ name: "String"
+ }
+ }
+};
+var comp9 = {
+ parameterPath: "comp",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "comp",
+ defaultValue: 'lease',
+ type: {
+ name: "String"
+ }
+ }
+};
+var contentDisposition = {
+ parameterPath: [
+ "options",
+ "directoryHttpHeaders",
+ "contentDisposition"
],
- queryParameters: [
- timeoutInSeconds,
- comp4
+ mapper: {
+ serializedName: "x-ms-content-disposition",
+ type: {
+ name: "String"
+ }
+ }
+};
+var contentEncoding = {
+ parameterPath: [
+ "options",
+ "directoryHttpHeaders",
+ "contentEncoding"
],
- headerParameters: [
- contentLength,
- multipartContentType,
- version,
- requestId
+ mapper: {
+ serializedName: "x-ms-content-encoding",
+ type: {
+ name: "String"
+ }
+ }
+};
+var contentLanguage = {
+ parameterPath: [
+ "options",
+ "directoryHttpHeaders",
+ "contentLanguage"
],
- requestBody: {
- parameterPath: "body",
- mapper: {
- required: true,
- serializedName: "body",
- type: {
- name: "Stream"
- }
+ mapper: {
+ serializedName: "x-ms-content-language",
+ type: {
+ name: "String"
}
- },
- contentType: "application/xml; charset=utf-8",
- responses: {
- 202: {
- bodyMapper: {
- serializedName: "parsedResponse",
- type: {
- name: "Stream"
- }
- },
- headersMapper: ServiceSubmitBatchHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ServiceSubmitBatchHeaders
+ }
+};
+var contentLength = {
+ parameterPath: "contentLength",
+ mapper: {
+ required: true,
+ serializedName: "Content-Length",
+ type: {
+ name: "Number"
}
- },
- isXML: true,
- serializer: serializer
+ }
};
-var filterBlobsOperationSpec = {
- httpMethod: "GET",
- urlParameters: [
- url
+var contentType = {
+ parameterPath: [
+ "options",
+ "directoryHttpHeaders",
+ "contentType"
],
- queryParameters: [
- timeoutInSeconds,
- where,
- marker0,
- maxPageSize,
- comp5
+ mapper: {
+ serializedName: "x-ms-content-type",
+ type: {
+ name: "String"
+ }
+ }
+};
+var copyActionAbortConstant = {
+ parameterPath: "copyActionAbortConstant",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "x-ms-copy-action",
+ defaultValue: 'abort',
+ type: {
+ name: "String"
+ }
+ }
+};
+var copyId = {
+ parameterPath: "copyId",
+ mapper: {
+ required: true,
+ serializedName: "copyid",
+ type: {
+ name: "String"
+ }
+ }
+};
+var copySource = {
+ parameterPath: "copySource",
+ mapper: {
+ required: true,
+ serializedName: "x-ms-copy-source",
+ type: {
+ name: "String"
+ }
+ }
+};
+var defaultEncryptionScope = {
+ parameterPath: [
+ "options",
+ "containerEncryptionScope",
+ "defaultEncryptionScope"
],
- headerParameters: [
- version,
- requestId
+ mapper: {
+ serializedName: "x-ms-default-encryption-scope",
+ type: {
+ name: "String"
+ }
+ }
+};
+var deletedContainerName = {
+ parameterPath: [
+ "options",
+ "deletedContainerName"
],
- responses: {
- 200: {
- bodyMapper: FilterBlobSegment,
- headersMapper: ServiceFilterBlobsHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ServiceFilterBlobsHeaders
+ mapper: {
+ serializedName: "x-ms-deleted-container-name",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer
+ }
};
-
-/*
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for license information.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
- */
-
-var Mappers$1 = /*#__PURE__*/Object.freeze({
- __proto__: null,
- AccessPolicy: AccessPolicy,
- BlobFlatListSegment: BlobFlatListSegment,
- BlobHierarchyListSegment: BlobHierarchyListSegment,
- BlobItemInternal: BlobItemInternal,
- BlobPrefix: BlobPrefix,
- BlobPropertiesInternal: BlobPropertiesInternal,
- BlobTag: BlobTag,
- BlobTags: BlobTags,
- ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders,
- ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders,
- ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders,
- ContainerCreateHeaders: ContainerCreateHeaders,
- ContainerDeleteHeaders: ContainerDeleteHeaders,
- ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders,
- ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders,
- ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders,
- ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders,
- ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders,
- ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders,
- ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders,
- ContainerRestoreHeaders: ContainerRestoreHeaders,
- ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders,
- ContainerSetMetadataHeaders: ContainerSetMetadataHeaders,
- ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse,
- ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse,
- SignedIdentifier: SignedIdentifier,
- StorageError: StorageError
-});
-
-/*
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for
- * license information.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is
- * regenerated.
- */
-/** Class representing a Container. */
-var Container = /** @class */ (function () {
- /**
- * Create a Container.
- * @param {StorageClientContext} client Reference to the service client.
- */
- function Container(client) {
- this.client = client;
- }
- Container.prototype.create = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, createOperationSpec, callback);
- };
- Container.prototype.getProperties = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, getPropertiesOperationSpec$1, callback);
- };
- Container.prototype.deleteMethod = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, deleteMethodOperationSpec, callback);
- };
- Container.prototype.setMetadata = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, setMetadataOperationSpec, callback);
- };
- Container.prototype.getAccessPolicy = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, getAccessPolicyOperationSpec, callback);
- };
- Container.prototype.setAccessPolicy = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, setAccessPolicyOperationSpec, callback);
- };
- Container.prototype.restore = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, restoreOperationSpec, callback);
- };
- Container.prototype.acquireLease = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, acquireLeaseOperationSpec, callback);
- };
- Container.prototype.releaseLease = function (leaseId, options, callback) {
- return this.client.sendOperationRequest({
- leaseId: leaseId,
- options: options
- }, releaseLeaseOperationSpec, callback);
- };
- Container.prototype.renewLease = function (leaseId, options, callback) {
- return this.client.sendOperationRequest({
- leaseId: leaseId,
- options: options
- }, renewLeaseOperationSpec, callback);
- };
- Container.prototype.breakLease = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, breakLeaseOperationSpec, callback);
- };
- Container.prototype.changeLease = function (leaseId, proposedLeaseId, options, callback) {
- return this.client.sendOperationRequest({
- leaseId: leaseId,
- proposedLeaseId: proposedLeaseId,
- options: options
- }, changeLeaseOperationSpec, callback);
- };
- Container.prototype.listBlobFlatSegment = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, listBlobFlatSegmentOperationSpec, callback);
- };
- Container.prototype.listBlobHierarchySegment = function (delimiter, options, callback) {
- return this.client.sendOperationRequest({
- delimiter: delimiter,
- options: options
- }, listBlobHierarchySegmentOperationSpec, callback);
- };
- Container.prototype.getAccountInfo = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, getAccountInfoOperationSpec$1, callback);
- };
- return Container;
-}());
-// Operation Specifications
-var serializer$1 = new coreHttp.Serializer(Mappers$1, true);
-var createOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- restype2
- ],
- headerParameters: [
- metadata,
- access,
- version,
- requestId,
- defaultEncryptionScope,
- preventEncryptionScopeOverride
+var deletedContainerVersion = {
+ parameterPath: [
+ "options",
+ "deletedContainerVersion"
],
- responses: {
- 201: {
- headersMapper: ContainerCreateHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerCreateHeaders
+ mapper: {
+ serializedName: "x-ms-deleted-container-version",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer$1
+ }
};
-var getPropertiesOperationSpec$1 = {
- httpMethod: "GET",
- path: "{containerName}",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- restype2
- ],
- headerParameters: [
- version,
- requestId,
- leaseId0
+var deleteSnapshots = {
+ parameterPath: [
+ "options",
+ "deleteSnapshots"
],
- responses: {
- 200: {
- headersMapper: ContainerGetPropertiesHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerGetPropertiesHeaders
+ mapper: {
+ serializedName: "x-ms-delete-snapshots",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "include",
+ "only"
+ ]
}
- },
- isXML: true,
- serializer: serializer$1
+ }
};
-var deleteMethodOperationSpec = {
- httpMethod: "DELETE",
- path: "{containerName}",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- restype2
- ],
- headerParameters: [
- version,
- requestId,
- leaseId0,
- ifModifiedSince,
- ifUnmodifiedSince
- ],
- responses: {
- 202: {
- headersMapper: ContainerDeleteHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerDeleteHeaders
+var delimiter = {
+ parameterPath: "delimiter",
+ mapper: {
+ required: true,
+ serializedName: "delimiter",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer$1
+ }
};
-var setMetadataOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- restype2,
- comp6
- ],
- headerParameters: [
- metadata,
- version,
- requestId,
- leaseId0,
- ifModifiedSince
+var directoryProperties = {
+ parameterPath: [
+ "options",
+ "directoryProperties"
],
- responses: {
- 200: {
- headersMapper: ContainerSetMetadataHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerSetMetadataHeaders
+ mapper: {
+ serializedName: "x-ms-properties",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer$1
+ }
};
-var getAccessPolicyOperationSpec = {
- httpMethod: "GET",
- path: "{containerName}",
- urlParameters: [
- url
+var duration = {
+ parameterPath: [
+ "options",
+ "duration"
],
- queryParameters: [
- timeoutInSeconds,
- restype2,
- comp7
+ mapper: {
+ serializedName: "x-ms-lease-duration",
+ type: {
+ name: "Number"
+ }
+ }
+};
+var encryptionAlgorithm = {
+ parameterPath: [
+ "options",
+ "cpkInfo",
+ "encryptionAlgorithm"
],
- headerParameters: [
- version,
- requestId,
- leaseId0
+ mapper: {
+ serializedName: "x-ms-encryption-algorithm",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "AES256"
+ ]
+ }
+ }
+};
+var encryptionKey = {
+ parameterPath: [
+ "options",
+ "cpkInfo",
+ "encryptionKey"
],
- responses: {
- 200: {
- bodyMapper: {
- xmlElementName: "SignedIdentifier",
- serializedName: "parsedResponse",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "SignedIdentifier"
- }
- }
- }
- },
- headersMapper: ContainerGetAccessPolicyHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerGetAccessPolicyHeaders
+ mapper: {
+ serializedName: "x-ms-encryption-key",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer$1
+ }
};
-var setAccessPolicyOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}",
- urlParameters: [
- url
+var encryptionKeySha256 = {
+ parameterPath: [
+ "options",
+ "cpkInfo",
+ "encryptionKeySha256"
],
- queryParameters: [
- timeoutInSeconds,
- restype2,
- comp7
+ mapper: {
+ serializedName: "x-ms-encryption-key-sha256",
+ type: {
+ name: "String"
+ }
+ }
+};
+var encryptionScope = {
+ parameterPath: [
+ "options",
+ "encryptionScope"
],
- headerParameters: [
- access,
- version,
- requestId,
- leaseId0,
- ifModifiedSince,
- ifUnmodifiedSince
+ mapper: {
+ serializedName: "x-ms-encryption-scope",
+ type: {
+ name: "String"
+ }
+ }
+};
+var expiresOn = {
+ parameterPath: [
+ "options",
+ "expiresOn"
],
- requestBody: {
- parameterPath: [
- "options",
- "containerAcl"
- ],
- mapper: {
- xmlName: "SignedIdentifiers",
- xmlElementName: "SignedIdentifier",
- serializedName: "containerAcl",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "SignedIdentifier"
- }
- }
- }
+ mapper: {
+ serializedName: "x-ms-expiry-time",
+ type: {
+ name: "String"
}
- },
- contentType: "application/xml; charset=utf-8",
- responses: {
- 200: {
- headersMapper: ContainerSetAccessPolicyHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerSetAccessPolicyHeaders
+ }
+};
+var expiryOptions = {
+ parameterPath: "expiryOptions",
+ mapper: {
+ required: true,
+ serializedName: "x-ms-expiry-option",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer$1
+ }
};
-var restoreOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}",
- urlParameters: [
- url
+var group = {
+ parameterPath: [
+ "options",
+ "group"
],
- queryParameters: [
- timeoutInSeconds,
- restype2,
- comp8
+ mapper: {
+ serializedName: "x-ms-group",
+ type: {
+ name: "String"
+ }
+ }
+};
+var ifMatch = {
+ parameterPath: [
+ "options",
+ "modifiedAccessConditions",
+ "ifMatch"
],
- headerParameters: [
- version,
- requestId,
- deletedContainerName,
- deletedContainerVersion
+ mapper: {
+ serializedName: "If-Match",
+ type: {
+ name: "String"
+ }
+ }
+};
+var ifModifiedSince = {
+ parameterPath: [
+ "options",
+ "modifiedAccessConditions",
+ "ifModifiedSince"
],
- responses: {
- 201: {
- headersMapper: ContainerRestoreHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerRestoreHeaders
+ mapper: {
+ serializedName: "If-Modified-Since",
+ type: {
+ name: "DateTimeRfc1123"
}
- },
- isXML: true,
- serializer: serializer$1
+ }
};
-var acquireLeaseOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}",
- urlParameters: [
- url
+var ifNoneMatch = {
+ parameterPath: [
+ "options",
+ "modifiedAccessConditions",
+ "ifNoneMatch"
],
- queryParameters: [
- timeoutInSeconds,
- comp9,
- restype2
- ],
- headerParameters: [
- duration,
- proposedLeaseId0,
- version,
- requestId,
- action0,
- ifModifiedSince,
- ifUnmodifiedSince
- ],
- responses: {
- 201: {
- headersMapper: ContainerAcquireLeaseHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerAcquireLeaseHeaders
+ mapper: {
+ serializedName: "If-None-Match",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer$1
+ }
};
-var releaseLeaseOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}",
- urlParameters: [
- url
+var ifSequenceNumberEqualTo = {
+ parameterPath: [
+ "options",
+ "sequenceNumberAccessConditions",
+ "ifSequenceNumberEqualTo"
],
- queryParameters: [
- timeoutInSeconds,
- comp9,
- restype2
+ mapper: {
+ serializedName: "x-ms-if-sequence-number-eq",
+ type: {
+ name: "Number"
+ }
+ }
+};
+var ifSequenceNumberLessThan = {
+ parameterPath: [
+ "options",
+ "sequenceNumberAccessConditions",
+ "ifSequenceNumberLessThan"
],
- headerParameters: [
- leaseId1,
- version,
- requestId,
- action1,
- ifModifiedSince,
- ifUnmodifiedSince
+ mapper: {
+ serializedName: "x-ms-if-sequence-number-lt",
+ type: {
+ name: "Number"
+ }
+ }
+};
+var ifSequenceNumberLessThanOrEqualTo = {
+ parameterPath: [
+ "options",
+ "sequenceNumberAccessConditions",
+ "ifSequenceNumberLessThanOrEqualTo"
],
- responses: {
- 200: {
- headersMapper: ContainerReleaseLeaseHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerReleaseLeaseHeaders
+ mapper: {
+ serializedName: "x-ms-if-sequence-number-le",
+ type: {
+ name: "Number"
}
- },
- isXML: true,
- serializer: serializer$1
+ }
};
-var renewLeaseOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}",
- urlParameters: [
- url
+var ifTags = {
+ parameterPath: [
+ "options",
+ "modifiedAccessConditions",
+ "ifTags"
],
- queryParameters: [
- timeoutInSeconds,
- comp9,
- restype2
+ mapper: {
+ serializedName: "x-ms-if-tags",
+ type: {
+ name: "String"
+ }
+ }
+};
+var ifUnmodifiedSince = {
+ parameterPath: [
+ "options",
+ "modifiedAccessConditions",
+ "ifUnmodifiedSince"
],
- headerParameters: [
- leaseId1,
- version,
- requestId,
- action2,
- ifModifiedSince,
- ifUnmodifiedSince
+ mapper: {
+ serializedName: "If-Unmodified-Since",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ }
+};
+var include0 = {
+ parameterPath: [
+ "options",
+ "include"
],
- responses: {
- 200: {
- headersMapper: ContainerRenewLeaseHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerRenewLeaseHeaders
+ mapper: {
+ serializedName: "include",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "metadata",
+ "deleted"
+ ]
+ }
+ }
}
},
- isXML: true,
- serializer: serializer$1
+ collectionFormat: coreHttp.QueryCollectionFormat.Csv
};
-var breakLeaseOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- comp9,
- restype2
- ],
- headerParameters: [
- breakPeriod,
- version,
- requestId,
- action3,
- ifModifiedSince,
- ifUnmodifiedSince
+var include1 = {
+ parameterPath: [
+ "options",
+ "include"
],
- responses: {
- 202: {
- headersMapper: ContainerBreakLeaseHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerBreakLeaseHeaders
+ mapper: {
+ serializedName: "include",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "copy",
+ "deleted",
+ "metadata",
+ "snapshots",
+ "uncommittedblobs",
+ "versions",
+ "tags"
+ ]
+ }
+ }
}
},
- isXML: true,
- serializer: serializer$1
+ collectionFormat: coreHttp.QueryCollectionFormat.Csv
};
-var changeLeaseOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}",
- urlParameters: [
- url
+var leaseId0 = {
+ parameterPath: [
+ "options",
+ "leaseAccessConditions",
+ "leaseId"
],
- queryParameters: [
- timeoutInSeconds,
- comp9,
- restype2
+ mapper: {
+ serializedName: "x-ms-lease-id",
+ type: {
+ name: "String"
+ }
+ }
+};
+var leaseId1 = {
+ parameterPath: "leaseId",
+ mapper: {
+ required: true,
+ serializedName: "x-ms-lease-id",
+ type: {
+ name: "String"
+ }
+ }
+};
+var listType = {
+ parameterPath: "listType",
+ mapper: {
+ required: true,
+ serializedName: "blocklisttype",
+ defaultValue: 'committed',
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "committed",
+ "uncommitted",
+ "all"
+ ]
+ }
+ }
+};
+var marker0 = {
+ parameterPath: [
+ "options",
+ "marker"
],
- headerParameters: [
- leaseId1,
- proposedLeaseId1,
- version,
- requestId,
- action4,
- ifModifiedSince,
- ifUnmodifiedSince
+ mapper: {
+ serializedName: "marker",
+ type: {
+ name: "String"
+ }
+ }
+};
+var maxPageSize = {
+ parameterPath: [
+ "options",
+ "maxPageSize"
],
- responses: {
- 200: {
- headersMapper: ContainerChangeLeaseHeaders
+ mapper: {
+ serializedName: "maxresults",
+ constraints: {
+ InclusiveMinimum: 1
},
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerChangeLeaseHeaders
+ type: {
+ name: "Number"
}
- },
- isXML: true,
- serializer: serializer$1
+ }
};
-var listBlobFlatSegmentOperationSpec = {
- httpMethod: "GET",
- path: "{containerName}",
- urlParameters: [
- url
- ],
- queryParameters: [
- prefix,
- marker0,
- maxPageSize,
- include1,
- timeoutInSeconds,
- restype2,
- comp2
+var maxSize = {
+ parameterPath: [
+ "options",
+ "appendPositionAccessConditions",
+ "maxSize"
],
- headerParameters: [
- version,
- requestId
+ mapper: {
+ serializedName: "x-ms-blob-condition-maxsize",
+ type: {
+ name: "Number"
+ }
+ }
+};
+var metadata = {
+ parameterPath: [
+ "options",
+ "metadata"
],
- responses: {
- 200: {
- bodyMapper: ListBlobsFlatSegmentResponse,
- headersMapper: ContainerListBlobFlatSegmentHeaders
+ mapper: {
+ serializedName: "x-ms-meta",
+ type: {
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "String"
+ }
+ }
},
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerListBlobFlatSegmentHeaders
+ headerCollectionPrefix: "x-ms-meta-"
+ }
+};
+var multipartContentType = {
+ parameterPath: "multipartContentType",
+ mapper: {
+ required: true,
+ serializedName: "Content-Type",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer$1
+ }
};
-var listBlobHierarchySegmentOperationSpec = {
- httpMethod: "GET",
- path: "{containerName}",
- urlParameters: [
- url
+var owner = {
+ parameterPath: [
+ "options",
+ "owner"
],
- queryParameters: [
- prefix,
- delimiter,
- marker0,
- maxPageSize,
- include1,
- timeoutInSeconds,
- restype2,
- comp2
+ mapper: {
+ serializedName: "x-ms-owner",
+ type: {
+ name: "String"
+ }
+ }
+};
+var pageWrite0 = {
+ parameterPath: "pageWrite",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "x-ms-page-write",
+ defaultValue: 'update',
+ type: {
+ name: "String"
+ }
+ }
+};
+var pageWrite1 = {
+ parameterPath: "pageWrite",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "x-ms-page-write",
+ defaultValue: 'clear',
+ type: {
+ name: "String"
+ }
+ }
+};
+var pathRenameMode = {
+ parameterPath: [
+ "options",
+ "pathRenameMode"
],
- headerParameters: [
- version,
- requestId
+ mapper: {
+ serializedName: "mode",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "legacy",
+ "posix"
+ ]
+ }
+ }
+};
+var posixAcl = {
+ parameterPath: [
+ "options",
+ "posixAcl"
],
- responses: {
- 200: {
- bodyMapper: ListBlobsHierarchySegmentResponse,
- headersMapper: ContainerListBlobHierarchySegmentHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerListBlobHierarchySegmentHeaders
+ mapper: {
+ serializedName: "x-ms-acl",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer$1
+ }
};
-var getAccountInfoOperationSpec$1 = {
- httpMethod: "GET",
- path: "{containerName}",
- urlParameters: [
- url
+var posixPermissions = {
+ parameterPath: [
+ "options",
+ "posixPermissions"
],
- queryParameters: [
- restype1,
- comp0
+ mapper: {
+ serializedName: "x-ms-permissions",
+ type: {
+ name: "String"
+ }
+ }
+};
+var posixUmask = {
+ parameterPath: [
+ "options",
+ "posixUmask"
],
- headerParameters: [
- version
+ mapper: {
+ serializedName: "x-ms-umask",
+ type: {
+ name: "String"
+ }
+ }
+};
+var prefix = {
+ parameterPath: [
+ "options",
+ "prefix"
],
- responses: {
- 200: {
- headersMapper: ContainerGetAccountInfoHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: ContainerGetAccountInfoHeaders
+ mapper: {
+ serializedName: "prefix",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer$1
+ }
};
-
-/*
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for license information.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
- */
-
-var Mappers$2 = /*#__PURE__*/Object.freeze({
- __proto__: null,
- BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders,
- BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders,
- BlobBreakLeaseHeaders: BlobBreakLeaseHeaders,
- BlobChangeLeaseHeaders: BlobChangeLeaseHeaders,
- BlobCopyFromURLHeaders: BlobCopyFromURLHeaders,
- BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders,
- BlobDeleteHeaders: BlobDeleteHeaders,
- BlobDownloadHeaders: BlobDownloadHeaders,
- BlobGetAccessControlHeaders: BlobGetAccessControlHeaders,
- BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders,
- BlobGetPropertiesHeaders: BlobGetPropertiesHeaders,
- BlobGetTagsHeaders: BlobGetTagsHeaders,
- BlobQueryHeaders: BlobQueryHeaders,
- BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders,
- BlobRenameHeaders: BlobRenameHeaders,
- BlobRenewLeaseHeaders: BlobRenewLeaseHeaders,
- BlobSetAccessControlHeaders: BlobSetAccessControlHeaders,
- BlobSetExpiryHeaders: BlobSetExpiryHeaders,
- BlobSetHTTPHeadersHeaders: BlobSetHTTPHeadersHeaders,
- BlobSetMetadataHeaders: BlobSetMetadataHeaders,
- BlobSetTagsHeaders: BlobSetTagsHeaders,
- BlobSetTierHeaders: BlobSetTierHeaders,
- BlobStartCopyFromURLHeaders: BlobStartCopyFromURLHeaders,
- BlobTag: BlobTag,
- BlobTags: BlobTags,
- BlobUndeleteHeaders: BlobUndeleteHeaders,
- DataLakeStorageError: DataLakeStorageError,
- DataLakeStorageErrorError: DataLakeStorageErrorError,
- DelimitedTextConfiguration: DelimitedTextConfiguration,
- JsonTextConfiguration: JsonTextConfiguration,
- QueryFormat: QueryFormat,
- QueryRequest: QueryRequest,
- QuerySerialization: QuerySerialization,
- StorageError: StorageError
-});
-
-/*
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for
- * license information.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is
- * regenerated.
- */
-/** Class representing a Blob. */
-var Blob$1 = /** @class */ (function () {
- /**
- * Create a Blob.
- * @param {StorageClientContext} client Reference to the service client.
- */
- function Blob(client) {
- this.client = client;
- }
- Blob.prototype.download = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, downloadOperationSpec, callback);
- };
- Blob.prototype.getProperties = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, getPropertiesOperationSpec$2, callback);
- };
- Blob.prototype.deleteMethod = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, deleteMethodOperationSpec$1, callback);
- };
- Blob.prototype.setAccessControl = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, setAccessControlOperationSpec, callback);
- };
- Blob.prototype.getAccessControl = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, getAccessControlOperationSpec, callback);
- };
- Blob.prototype.rename = function (renameSource, options, callback) {
- return this.client.sendOperationRequest({
- renameSource: renameSource,
- options: options
- }, renameOperationSpec, callback);
- };
- Blob.prototype.undelete = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, undeleteOperationSpec, callback);
- };
- Blob.prototype.setExpiry = function (expiryOptions, options, callback) {
- return this.client.sendOperationRequest({
- expiryOptions: expiryOptions,
- options: options
- }, setExpiryOperationSpec, callback);
- };
- Blob.prototype.setHTTPHeaders = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, setHTTPHeadersOperationSpec, callback);
- };
- Blob.prototype.setMetadata = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, setMetadataOperationSpec$1, callback);
- };
- Blob.prototype.acquireLease = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, acquireLeaseOperationSpec$1, callback);
- };
- Blob.prototype.releaseLease = function (leaseId, options, callback) {
- return this.client.sendOperationRequest({
- leaseId: leaseId,
- options: options
- }, releaseLeaseOperationSpec$1, callback);
- };
- Blob.prototype.renewLease = function (leaseId, options, callback) {
- return this.client.sendOperationRequest({
- leaseId: leaseId,
- options: options
- }, renewLeaseOperationSpec$1, callback);
- };
- Blob.prototype.changeLease = function (leaseId, proposedLeaseId, options, callback) {
- return this.client.sendOperationRequest({
- leaseId: leaseId,
- proposedLeaseId: proposedLeaseId,
- options: options
- }, changeLeaseOperationSpec$1, callback);
- };
- Blob.prototype.breakLease = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, breakLeaseOperationSpec$1, callback);
- };
- Blob.prototype.createSnapshot = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, createSnapshotOperationSpec, callback);
- };
- Blob.prototype.startCopyFromURL = function (copySource, options, callback) {
- return this.client.sendOperationRequest({
- copySource: copySource,
- options: options
- }, startCopyFromURLOperationSpec, callback);
- };
- Blob.prototype.copyFromURL = function (copySource, options, callback) {
- return this.client.sendOperationRequest({
- copySource: copySource,
- options: options
- }, copyFromURLOperationSpec, callback);
- };
- Blob.prototype.abortCopyFromURL = function (copyId, options, callback) {
- return this.client.sendOperationRequest({
- copyId: copyId,
- options: options
- }, abortCopyFromURLOperationSpec, callback);
- };
- Blob.prototype.setTier = function (tier, options, callback) {
- return this.client.sendOperationRequest({
- tier: tier,
- options: options
- }, setTierOperationSpec, callback);
- };
- Blob.prototype.getAccountInfo = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, getAccountInfoOperationSpec$2, callback);
- };
- Blob.prototype.query = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, queryOperationSpec, callback);
- };
- Blob.prototype.getTags = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, getTagsOperationSpec, callback);
- };
- Blob.prototype.setTags = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, setTagsOperationSpec, callback);
- };
- return Blob;
-}());
-// Operation Specifications
-var serializer$2 = new coreHttp.Serializer(Mappers$2, true);
-var downloadOperationSpec = {
- httpMethod: "GET",
- path: "{containerName}/{blob}",
- urlParameters: [
- url
- ],
- queryParameters: [
- snapshot,
- versionId,
- timeoutInSeconds
- ],
- headerParameters: [
- range0,
- rangeGetContentMD5,
- rangeGetContentCRC64,
- version,
- requestId,
- leaseId0,
- encryptionKey,
- encryptionKeySha256,
- encryptionAlgorithm,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+var preventEncryptionScopeOverride = {
+ parameterPath: [
+ "options",
+ "containerEncryptionScope",
+ "preventEncryptionScopeOverride"
],
- responses: {
- 200: {
- bodyMapper: {
- serializedName: "parsedResponse",
- type: {
- name: "Stream"
- }
- },
- headersMapper: BlobDownloadHeaders
- },
- 206: {
- bodyMapper: {
- serializedName: "parsedResponse",
- type: {
- name: "Stream"
- }
- },
- headersMapper: BlobDownloadHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: BlobDownloadHeaders
+ mapper: {
+ serializedName: "x-ms-deny-encryption-scope-override",
+ type: {
+ name: "Boolean"
}
- },
- isXML: true,
- serializer: serializer$2
+ }
};
-var getPropertiesOperationSpec$2 = {
- httpMethod: "HEAD",
- path: "{containerName}/{blob}",
- urlParameters: [
- url
- ],
- queryParameters: [
- snapshot,
- versionId,
- timeoutInSeconds
- ],
- headerParameters: [
- version,
- requestId,
- leaseId0,
- encryptionKey,
- encryptionKeySha256,
- encryptionAlgorithm,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+var prevsnapshot = {
+ parameterPath: [
+ "options",
+ "prevsnapshot"
],
- responses: {
- 200: {
- headersMapper: BlobGetPropertiesHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: BlobGetPropertiesHeaders
+ mapper: {
+ serializedName: "prevsnapshot",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer$2
+ }
};
-var deleteMethodOperationSpec$1 = {
- httpMethod: "DELETE",
- path: "{containerName}/{blob}",
- urlParameters: [
- url
- ],
- queryParameters: [
- snapshot,
- versionId,
- timeoutInSeconds
- ],
- headerParameters: [
- deleteSnapshots,
- version,
- requestId,
- leaseId0,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+var prevSnapshotUrl = {
+ parameterPath: [
+ "options",
+ "prevSnapshotUrl"
],
- responses: {
- 202: {
- headersMapper: BlobDeleteHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: BlobDeleteHeaders
+ mapper: {
+ serializedName: "x-ms-previous-snapshot-url",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer$2
+ }
};
-var setAccessControlOperationSpec = {
- httpMethod: "PATCH",
- path: "{filesystem}/{path}",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- action5
- ],
- headerParameters: [
- owner,
- group,
- posixPermissions,
- posixAcl,
- requestId,
- version,
- leaseId0,
- ifMatch,
- ifNoneMatch,
- ifModifiedSince,
- ifUnmodifiedSince
+var proposedLeaseId0 = {
+ parameterPath: [
+ "options",
+ "proposedLeaseId"
],
- responses: {
- 200: {
- headersMapper: BlobSetAccessControlHeaders
- },
- default: {
- bodyMapper: DataLakeStorageError,
- headersMapper: BlobSetAccessControlHeaders
+ mapper: {
+ serializedName: "x-ms-proposed-lease-id",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer$2
+ }
};
-var getAccessControlOperationSpec = {
- httpMethod: "HEAD",
- path: "{filesystem}/{path}",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- upn,
- action6
- ],
- headerParameters: [
- requestId,
- version,
- leaseId0,
- ifMatch,
- ifNoneMatch,
- ifModifiedSince,
- ifUnmodifiedSince
- ],
- responses: {
- 200: {
- headersMapper: BlobGetAccessControlHeaders
- },
- default: {
- bodyMapper: DataLakeStorageError,
- headersMapper: BlobGetAccessControlHeaders
+var proposedLeaseId1 = {
+ parameterPath: "proposedLeaseId",
+ mapper: {
+ required: true,
+ serializedName: "x-ms-proposed-lease-id",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer$2
+ }
};
-var renameOperationSpec = {
- httpMethod: "PUT",
- path: "{filesystem}/{path}",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- pathRenameMode
- ],
- headerParameters: [
- renameSource,
- directoryProperties,
- posixPermissions,
- posixUmask,
- sourceLeaseId,
- version,
- requestId,
- cacheControl,
- contentType,
- contentEncoding,
- contentLanguage,
- contentDisposition,
- leaseId0,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- sourceIfModifiedSince,
- sourceIfUnmodifiedSince,
- sourceIfMatch,
- sourceIfNoneMatch
+var range0 = {
+ parameterPath: [
+ "options",
+ "range"
],
- responses: {
- 201: {
- headersMapper: BlobRenameHeaders
- },
- default: {
- bodyMapper: DataLakeStorageError,
- headersMapper: BlobRenameHeaders
+ mapper: {
+ serializedName: "x-ms-range",
+ type: {
+ name: "String"
}
- },
- isXML: true,
- serializer: serializer$2
+ }
};
-var undeleteOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- comp8
+var range1 = {
+ parameterPath: "range",
+ mapper: {
+ required: true,
+ serializedName: "x-ms-range",
+ type: {
+ name: "String"
+ }
+ }
+};
+var rangeGetContentCRC64 = {
+ parameterPath: [
+ "options",
+ "rangeGetContentCRC64"
],
- headerParameters: [
- version,
- requestId
+ mapper: {
+ serializedName: "x-ms-range-get-content-crc64",
+ type: {
+ name: "Boolean"
+ }
+ }
+};
+var rangeGetContentMD5 = {
+ parameterPath: [
+ "options",
+ "rangeGetContentMD5"
],
- responses: {
- 200: {
- headersMapper: BlobUndeleteHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: BlobUndeleteHeaders
+ mapper: {
+ serializedName: "x-ms-range-get-content-md5",
+ type: {
+ name: "Boolean"
}
- },
- isXML: true,
- serializer: serializer$2
+ }
};
-var setExpiryOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
- urlParameters: [
- url
+var rehydratePriority = {
+ parameterPath: [
+ "options",
+ "rehydratePriority"
],
- queryParameters: [
- timeoutInSeconds,
- comp10
+ mapper: {
+ serializedName: "x-ms-rehydrate-priority",
+ type: {
+ name: "String"
+ }
+ }
+};
+var renameSource = {
+ parameterPath: "renameSource",
+ mapper: {
+ required: true,
+ serializedName: "x-ms-rename-source",
+ type: {
+ name: "String"
+ }
+ }
+};
+var requestId = {
+ parameterPath: [
+ "options",
+ "requestId"
],
- headerParameters: [
- version,
- requestId,
- expiryOptions,
- expiresOn
+ mapper: {
+ serializedName: "x-ms-client-request-id",
+ type: {
+ name: "String"
+ }
+ }
+};
+var restype0 = {
+ parameterPath: "restype",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "restype",
+ defaultValue: 'service',
+ type: {
+ name: "String"
+ }
+ }
+};
+var restype1 = {
+ parameterPath: "restype",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "restype",
+ defaultValue: 'account',
+ type: {
+ name: "String"
+ }
+ }
+};
+var restype2 = {
+ parameterPath: "restype",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "restype",
+ defaultValue: 'container',
+ type: {
+ name: "String"
+ }
+ }
+};
+var sealBlob = {
+ parameterPath: [
+ "options",
+ "sealBlob"
],
- responses: {
- 200: {
- headersMapper: BlobSetExpiryHeaders
+ mapper: {
+ serializedName: "x-ms-seal-blob",
+ type: {
+ name: "Boolean"
+ }
+ }
+};
+var sequenceNumberAction = {
+ parameterPath: "sequenceNumberAction",
+ mapper: {
+ required: true,
+ serializedName: "x-ms-sequence-number-action",
+ type: {
+ name: "Enum",
+ allowedValues: [
+ "max",
+ "update",
+ "increment"
+ ]
+ }
+ }
+};
+var snapshot = {
+ parameterPath: [
+ "options",
+ "snapshot"
+ ],
+ mapper: {
+ serializedName: "snapshot",
+ type: {
+ name: "String"
+ }
+ }
+};
+var sourceContentCrc64 = {
+ parameterPath: [
+ "options",
+ "sourceContentCrc64"
+ ],
+ mapper: {
+ serializedName: "x-ms-source-content-crc64",
+ type: {
+ name: "ByteArray"
+ }
+ }
+};
+var sourceContentMD5 = {
+ parameterPath: [
+ "options",
+ "sourceContentMD5"
+ ],
+ mapper: {
+ serializedName: "x-ms-source-content-md5",
+ type: {
+ name: "ByteArray"
+ }
+ }
+};
+var sourceIfMatch = {
+ parameterPath: [
+ "options",
+ "sourceModifiedAccessConditions",
+ "sourceIfMatch"
+ ],
+ mapper: {
+ serializedName: "x-ms-source-if-match",
+ type: {
+ name: "String"
+ }
+ }
+};
+var sourceIfModifiedSince = {
+ parameterPath: [
+ "options",
+ "sourceModifiedAccessConditions",
+ "sourceIfModifiedSince"
+ ],
+ mapper: {
+ serializedName: "x-ms-source-if-modified-since",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ }
+};
+var sourceIfNoneMatch = {
+ parameterPath: [
+ "options",
+ "sourceModifiedAccessConditions",
+ "sourceIfNoneMatch"
+ ],
+ mapper: {
+ serializedName: "x-ms-source-if-none-match",
+ type: {
+ name: "String"
+ }
+ }
+};
+var sourceIfTags = {
+ parameterPath: [
+ "options",
+ "sourceModifiedAccessConditions",
+ "sourceIfTags"
+ ],
+ mapper: {
+ serializedName: "x-ms-source-if-tags",
+ type: {
+ name: "String"
+ }
+ }
+};
+var sourceIfUnmodifiedSince = {
+ parameterPath: [
+ "options",
+ "sourceModifiedAccessConditions",
+ "sourceIfUnmodifiedSince"
+ ],
+ mapper: {
+ serializedName: "x-ms-source-if-unmodified-since",
+ type: {
+ name: "DateTimeRfc1123"
+ }
+ }
+};
+var sourceLeaseId = {
+ parameterPath: [
+ "options",
+ "sourceLeaseId"
+ ],
+ mapper: {
+ serializedName: "x-ms-source-lease-id",
+ type: {
+ name: "String"
+ }
+ }
+};
+var sourceRange0 = {
+ parameterPath: "sourceRange",
+ mapper: {
+ required: true,
+ serializedName: "x-ms-source-range",
+ type: {
+ name: "String"
+ }
+ }
+};
+var sourceRange1 = {
+ parameterPath: [
+ "options",
+ "sourceRange"
+ ],
+ mapper: {
+ serializedName: "x-ms-source-range",
+ type: {
+ name: "String"
+ }
+ }
+};
+var sourceUrl = {
+ parameterPath: "sourceUrl",
+ mapper: {
+ required: true,
+ serializedName: "x-ms-copy-source",
+ type: {
+ name: "String"
+ }
+ }
+};
+var tier0 = {
+ parameterPath: [
+ "options",
+ "tier"
+ ],
+ mapper: {
+ serializedName: "x-ms-access-tier",
+ type: {
+ name: "String"
+ }
+ }
+};
+var tier1 = {
+ parameterPath: "tier",
+ mapper: {
+ required: true,
+ serializedName: "x-ms-access-tier",
+ type: {
+ name: "String"
+ }
+ }
+};
+var timeoutInSeconds = {
+ parameterPath: [
+ "options",
+ "timeoutInSeconds"
+ ],
+ mapper: {
+ serializedName: "timeout",
+ constraints: {
+ InclusiveMinimum: 0
},
- default: {
- bodyMapper: StorageError,
- headersMapper: BlobSetExpiryHeaders
+ type: {
+ name: "Number"
+ }
+ }
+};
+var transactionalContentCrc64 = {
+ parameterPath: [
+ "options",
+ "transactionalContentCrc64"
+ ],
+ mapper: {
+ serializedName: "x-ms-content-crc64",
+ type: {
+ name: "ByteArray"
+ }
+ }
+};
+var transactionalContentMD5 = {
+ parameterPath: [
+ "options",
+ "transactionalContentMD5"
+ ],
+ mapper: {
+ serializedName: "Content-MD5",
+ type: {
+ name: "ByteArray"
+ }
+ }
+};
+var upn = {
+ parameterPath: [
+ "options",
+ "upn"
+ ],
+ mapper: {
+ serializedName: "upn",
+ type: {
+ name: "Boolean"
+ }
+ }
+};
+var url = {
+ parameterPath: "url",
+ mapper: {
+ required: true,
+ serializedName: "url",
+ defaultValue: '',
+ type: {
+ name: "String"
}
},
- isXML: true,
- serializer: serializer$2
+ skipEncoding: true
};
-var setHTTPHeadersOperationSpec = {
+var version = {
+ parameterPath: "version",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "x-ms-version",
+ defaultValue: '2019-12-12',
+ type: {
+ name: "String"
+ }
+ }
+};
+var versionId = {
+ parameterPath: [
+ "options",
+ "versionId"
+ ],
+ mapper: {
+ serializedName: "versionid",
+ type: {
+ name: "String"
+ }
+ }
+};
+var where = {
+ parameterPath: [
+ "options",
+ "where"
+ ],
+ mapper: {
+ serializedName: "where",
+ type: {
+ name: "String"
+ }
+ }
+};
+var xMsRequiresSync = {
+ parameterPath: "xMsRequiresSync",
+ mapper: {
+ required: true,
+ isConstant: true,
+ serializedName: "x-ms-requires-sync",
+ defaultValue: 'true',
+ type: {
+ name: "String"
+ }
+ }
+};
+
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for
+ * license information.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is
+ * regenerated.
+ */
+/** Class representing a Service. */
+var Service = /** @class */ (function () {
+ /**
+ * Create a Service.
+ * @param {StorageClientContext} client Reference to the service client.
+ */
+ function Service(client) {
+ this.client = client;
+ }
+ Service.prototype.setProperties = function (blobServiceProperties, options, callback) {
+ return this.client.sendOperationRequest({
+ blobServiceProperties: blobServiceProperties,
+ options: options
+ }, setPropertiesOperationSpec, callback);
+ };
+ Service.prototype.getProperties = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, getPropertiesOperationSpec, callback);
+ };
+ Service.prototype.getStatistics = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, getStatisticsOperationSpec, callback);
+ };
+ Service.prototype.listContainersSegment = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, listContainersSegmentOperationSpec, callback);
+ };
+ Service.prototype.getUserDelegationKey = function (keyInfo, options, callback) {
+ return this.client.sendOperationRequest({
+ keyInfo: keyInfo,
+ options: options
+ }, getUserDelegationKeyOperationSpec, callback);
+ };
+ Service.prototype.getAccountInfo = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, getAccountInfoOperationSpec, callback);
+ };
+ Service.prototype.submitBatch = function (body, contentLength, multipartContentType, options, callback) {
+ return this.client.sendOperationRequest({
+ body: body,
+ contentLength: contentLength,
+ multipartContentType: multipartContentType,
+ options: options
+ }, submitBatchOperationSpec, callback);
+ };
+ Service.prototype.filterBlobs = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, filterBlobsOperationSpec, callback);
+ };
+ return Service;
+}());
+// Operation Specifications
+var serializer = new coreHttp.Serializer(Mappers, true);
+var setPropertiesOperationSpec = {
httpMethod: "PUT",
- path: "{containerName}/{blob}",
urlParameters: [
url
],
queryParameters: [
timeoutInSeconds,
+ restype0,
comp0
],
headerParameters: [
version,
- requestId,
- blobCacheControl,
- blobContentType,
- blobContentMD5,
- blobContentEncoding,
- blobContentLanguage,
- blobContentDisposition,
- leaseId0,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ requestId
],
+ requestBody: {
+ parameterPath: "blobServiceProperties",
+ mapper: tslib.__assign(tslib.__assign({}, BlobServiceProperties), { required: true })
+ },
+ contentType: "application/xml; charset=utf-8",
responses: {
- 200: {
- headersMapper: BlobSetHTTPHeadersHeaders
+ 202: {
+ headersMapper: ServiceSetPropertiesHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobSetHTTPHeadersHeaders
+ headersMapper: ServiceSetPropertiesHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer
};
-var setMetadataOperationSpec$1 = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
+var getPropertiesOperationSpec = {
+ httpMethod: "GET",
urlParameters: [
url
],
queryParameters: [
timeoutInSeconds,
- comp6
+ restype0,
+ comp0
],
headerParameters: [
- metadata,
- encryptionScope,
version,
- requestId,
- leaseId0,
- encryptionKey,
- encryptionKeySha256,
- encryptionAlgorithm,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ requestId
],
responses: {
200: {
- headersMapper: BlobSetMetadataHeaders
+ bodyMapper: BlobServiceProperties,
+ headersMapper: ServiceGetPropertiesHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobSetMetadataHeaders
+ headersMapper: ServiceGetPropertiesHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer
};
-var acquireLeaseOperationSpec$1 = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
+var getStatisticsOperationSpec = {
+ httpMethod: "GET",
urlParameters: [
url
],
queryParameters: [
timeoutInSeconds,
- comp9
+ restype0,
+ comp1
],
headerParameters: [
- duration,
- proposedLeaseId0,
version,
- requestId,
- action0,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ requestId
],
responses: {
- 201: {
- headersMapper: BlobAcquireLeaseHeaders
+ 200: {
+ bodyMapper: BlobServiceStatistics,
+ headersMapper: ServiceGetStatisticsHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobAcquireLeaseHeaders
+ headersMapper: ServiceGetStatisticsHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer
};
-var releaseLeaseOperationSpec$1 = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
+var listContainersSegmentOperationSpec = {
+ httpMethod: "GET",
urlParameters: [
url
],
queryParameters: [
+ prefix,
+ marker0,
+ maxPageSize,
+ include0,
timeoutInSeconds,
- comp9
+ comp2
],
headerParameters: [
- leaseId1,
version,
- requestId,
- action1,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ requestId
],
responses: {
200: {
- headersMapper: BlobReleaseLeaseHeaders
+ bodyMapper: ListContainersSegmentResponse,
+ headersMapper: ServiceListContainersSegmentHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobReleaseLeaseHeaders
+ headersMapper: ServiceListContainersSegmentHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer
};
-var renewLeaseOperationSpec$1 = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
+var getUserDelegationKeyOperationSpec = {
+ httpMethod: "POST",
urlParameters: [
url
],
queryParameters: [
timeoutInSeconds,
- comp9
+ restype0,
+ comp3
],
headerParameters: [
- leaseId1,
version,
- requestId,
- action2,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ requestId
],
+ requestBody: {
+ parameterPath: "keyInfo",
+ mapper: tslib.__assign(tslib.__assign({}, KeyInfo), { required: true })
+ },
+ contentType: "application/xml; charset=utf-8",
responses: {
200: {
- headersMapper: BlobRenewLeaseHeaders
+ bodyMapper: UserDelegationKey,
+ headersMapper: ServiceGetUserDelegationKeyHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobRenewLeaseHeaders
+ headersMapper: ServiceGetUserDelegationKeyHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer
};
-var changeLeaseOperationSpec$1 = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
+var getAccountInfoOperationSpec = {
+ httpMethod: "GET",
urlParameters: [
url
],
queryParameters: [
- timeoutInSeconds,
- comp9
+ restype1,
+ comp0
],
headerParameters: [
- leaseId1,
- proposedLeaseId1,
- version,
- requestId,
- action4,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ version
],
responses: {
200: {
- headersMapper: BlobChangeLeaseHeaders
+ headersMapper: ServiceGetAccountInfoHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobChangeLeaseHeaders
+ headersMapper: ServiceGetAccountInfoHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer
};
-var breakLeaseOperationSpec$1 = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
+var submitBatchOperationSpec = {
+ httpMethod: "POST",
urlParameters: [
url
],
queryParameters: [
timeoutInSeconds,
- comp9
+ comp4
],
headerParameters: [
- breakPeriod,
+ contentLength,
+ multipartContentType,
version,
- requestId,
- action3,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ requestId
],
+ requestBody: {
+ parameterPath: "body",
+ mapper: {
+ required: true,
+ serializedName: "body",
+ type: {
+ name: "Stream"
+ }
+ }
+ },
+ contentType: "application/xml; charset=utf-8",
responses: {
202: {
- headersMapper: BlobBreakLeaseHeaders
+ bodyMapper: {
+ serializedName: "parsedResponse",
+ type: {
+ name: "Stream"
+ }
+ },
+ headersMapper: ServiceSubmitBatchHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobBreakLeaseHeaders
+ headersMapper: ServiceSubmitBatchHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer
};
-var createSnapshotOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
+var filterBlobsOperationSpec = {
+ httpMethod: "GET",
urlParameters: [
url
],
queryParameters: [
timeoutInSeconds,
- comp11
+ where,
+ marker0,
+ maxPageSize,
+ comp5
],
headerParameters: [
- metadata,
- encryptionScope,
version,
- requestId,
- encryptionKey,
- encryptionKeySha256,
- encryptionAlgorithm,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags,
- leaseId0
+ requestId
],
responses: {
- 201: {
- headersMapper: BlobCreateSnapshotHeaders
+ 200: {
+ bodyMapper: FilterBlobSegment,
+ headersMapper: ServiceFilterBlobsHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobCreateSnapshotHeaders
+ headersMapper: ServiceFilterBlobsHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer
};
-var startCopyFromURLOperationSpec = {
+
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for license information.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
+ */
+
+var Mappers$1 = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ AccessPolicy: AccessPolicy,
+ BlobFlatListSegment: BlobFlatListSegment,
+ BlobHierarchyListSegment: BlobHierarchyListSegment,
+ BlobItemInternal: BlobItemInternal,
+ BlobPrefix: BlobPrefix,
+ BlobPropertiesInternal: BlobPropertiesInternal,
+ BlobTag: BlobTag,
+ BlobTags: BlobTags,
+ ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders,
+ ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders,
+ ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders,
+ ContainerCreateHeaders: ContainerCreateHeaders,
+ ContainerDeleteHeaders: ContainerDeleteHeaders,
+ ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders,
+ ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders,
+ ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders,
+ ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders,
+ ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders,
+ ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders,
+ ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders,
+ ContainerRestoreHeaders: ContainerRestoreHeaders,
+ ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders,
+ ContainerSetMetadataHeaders: ContainerSetMetadataHeaders,
+ ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse,
+ ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse,
+ SignedIdentifier: SignedIdentifier,
+ StorageError: StorageError
+});
+
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for
+ * license information.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is
+ * regenerated.
+ */
+/** Class representing a Container. */
+var Container = /** @class */ (function () {
+ /**
+ * Create a Container.
+ * @param {StorageClientContext} client Reference to the service client.
+ */
+ function Container(client) {
+ this.client = client;
+ }
+ Container.prototype.create = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, createOperationSpec, callback);
+ };
+ Container.prototype.getProperties = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, getPropertiesOperationSpec$1, callback);
+ };
+ Container.prototype.deleteMethod = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, deleteMethodOperationSpec, callback);
+ };
+ Container.prototype.setMetadata = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, setMetadataOperationSpec, callback);
+ };
+ Container.prototype.getAccessPolicy = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, getAccessPolicyOperationSpec, callback);
+ };
+ Container.prototype.setAccessPolicy = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, setAccessPolicyOperationSpec, callback);
+ };
+ Container.prototype.restore = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, restoreOperationSpec, callback);
+ };
+ Container.prototype.acquireLease = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, acquireLeaseOperationSpec, callback);
+ };
+ Container.prototype.releaseLease = function (leaseId, options, callback) {
+ return this.client.sendOperationRequest({
+ leaseId: leaseId,
+ options: options
+ }, releaseLeaseOperationSpec, callback);
+ };
+ Container.prototype.renewLease = function (leaseId, options, callback) {
+ return this.client.sendOperationRequest({
+ leaseId: leaseId,
+ options: options
+ }, renewLeaseOperationSpec, callback);
+ };
+ Container.prototype.breakLease = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, breakLeaseOperationSpec, callback);
+ };
+ Container.prototype.changeLease = function (leaseId, proposedLeaseId, options, callback) {
+ return this.client.sendOperationRequest({
+ leaseId: leaseId,
+ proposedLeaseId: proposedLeaseId,
+ options: options
+ }, changeLeaseOperationSpec, callback);
+ };
+ Container.prototype.listBlobFlatSegment = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, listBlobFlatSegmentOperationSpec, callback);
+ };
+ Container.prototype.listBlobHierarchySegment = function (delimiter, options, callback) {
+ return this.client.sendOperationRequest({
+ delimiter: delimiter,
+ options: options
+ }, listBlobHierarchySegmentOperationSpec, callback);
+ };
+ Container.prototype.getAccountInfo = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, getAccountInfoOperationSpec$1, callback);
+ };
+ return Container;
+}());
+// Operation Specifications
+var serializer$1 = new coreHttp.Serializer(Mappers$1, true);
+var createOperationSpec = {
httpMethod: "PUT",
- path: "{containerName}/{blob}",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
- timeoutInSeconds
+ timeoutInSeconds,
+ restype2
],
headerParameters: [
metadata,
- tier0,
- rehydratePriority,
- copySource,
+ access,
version,
requestId,
- blobTagsString,
- sealBlob,
- sourceIfModifiedSince,
- sourceIfUnmodifiedSince,
- sourceIfMatch,
- sourceIfNoneMatch,
- sourceIfTags,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags,
- leaseId0
+ defaultEncryptionScope,
+ preventEncryptionScopeOverride
],
responses: {
- 202: {
- headersMapper: BlobStartCopyFromURLHeaders
+ 201: {
+ headersMapper: ContainerCreateHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobStartCopyFromURLHeaders
+ headersMapper: ContainerCreateHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer$1
};
-var copyFromURLOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
+var getPropertiesOperationSpec$1 = {
+ httpMethod: "GET",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
- timeoutInSeconds
+ timeoutInSeconds,
+ restype2
],
headerParameters: [
- metadata,
- tier0,
- copySource,
version,
requestId,
- sourceContentMD5,
- blobTagsString,
- xMsRequiresSync,
- sourceIfModifiedSince,
- sourceIfUnmodifiedSince,
- sourceIfMatch,
- sourceIfNoneMatch,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags,
leaseId0
],
responses: {
- 202: {
- headersMapper: BlobCopyFromURLHeaders
+ 200: {
+ headersMapper: ContainerGetPropertiesHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobCopyFromURLHeaders
+ headersMapper: ContainerGetPropertiesHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer$1
};
-var abortCopyFromURLOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
+var deleteMethodOperationSpec = {
+ httpMethod: "DELETE",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
- copyId,
timeoutInSeconds,
- comp12
+ restype2
],
headerParameters: [
version,
requestId,
- copyActionAbortConstant,
- leaseId0
+ leaseId0,
+ ifModifiedSince,
+ ifUnmodifiedSince
],
responses: {
- 204: {
- headersMapper: BlobAbortCopyFromURLHeaders
+ 202: {
+ headersMapper: ContainerDeleteHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobAbortCopyFromURLHeaders
+ headersMapper: ContainerDeleteHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer$1
};
-var setTierOperationSpec = {
+var setMetadataOperationSpec = {
httpMethod: "PUT",
- path: "{containerName}/{blob}",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
- snapshot,
- versionId,
timeoutInSeconds,
- comp13
+ restype2,
+ comp6
],
headerParameters: [
- tier1,
- rehydratePriority,
+ metadata,
version,
requestId,
leaseId0,
- ifTags
+ ifModifiedSince
],
responses: {
200: {
- headersMapper: BlobSetTierHeaders
- },
- 202: {
- headersMapper: BlobSetTierHeaders
+ headersMapper: ContainerSetMetadataHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobSetTierHeaders
+ headersMapper: ContainerSetMetadataHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer$1
};
-var getAccountInfoOperationSpec$2 = {
+var getAccessPolicyOperationSpec = {
httpMethod: "GET",
- path: "{containerName}/{blob}",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
- restype1,
- comp0
+ timeoutInSeconds,
+ restype2,
+ comp7
],
headerParameters: [
- version
+ version,
+ requestId,
+ leaseId0
],
responses: {
200: {
- headersMapper: BlobGetAccountInfoHeaders
+ bodyMapper: {
+ xmlElementName: "SignedIdentifier",
+ serializedName: "parsedResponse",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "SignedIdentifier"
+ }
+ }
+ }
+ },
+ headersMapper: ContainerGetAccessPolicyHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobGetAccountInfoHeaders
+ headersMapper: ContainerGetAccessPolicyHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer$1
};
-var queryOperationSpec = {
- httpMethod: "POST",
- path: "{containerName}/{blob}",
+var setAccessPolicyOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
- snapshot,
timeoutInSeconds,
- comp14
+ restype2,
+ comp7
],
headerParameters: [
+ access,
version,
requestId,
leaseId0,
- encryptionKey,
- encryptionKeySha256,
- encryptionAlgorithm,
ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ ifUnmodifiedSince
],
requestBody: {
parameterPath: [
"options",
- "queryRequest"
+ "containerAcl"
],
- mapper: QueryRequest
+ mapper: {
+ xmlName: "SignedIdentifiers",
+ xmlElementName: "SignedIdentifier",
+ serializedName: "containerAcl",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "SignedIdentifier"
+ }
+ }
+ }
+ }
},
contentType: "application/xml; charset=utf-8",
responses: {
200: {
- bodyMapper: {
- serializedName: "parsedResponse",
- type: {
- name: "Stream"
- }
- },
- headersMapper: BlobQueryHeaders
- },
- 206: {
- bodyMapper: {
- serializedName: "parsedResponse",
- type: {
- name: "Stream"
- }
- },
- headersMapper: BlobQueryHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: BlobQueryHeaders
- }
- },
- isXML: true,
- serializer: serializer$2
-};
-var getTagsOperationSpec = {
- httpMethod: "GET",
- path: "{containerName}/{blob}",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- snapshot,
- versionId,
- comp15
- ],
- headerParameters: [
- version,
- requestId,
- ifTags
- ],
- responses: {
- 200: {
- bodyMapper: BlobTags,
- headersMapper: BlobGetTagsHeaders
+ headersMapper: ContainerSetAccessPolicyHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobGetTagsHeaders
+ headersMapper: ContainerSetAccessPolicyHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer$1
};
-var setTagsOperationSpec = {
+var restoreOperationSpec = {
httpMethod: "PUT",
- path: "{containerName}/{blob}",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
timeoutInSeconds,
- versionId,
- comp15
+ restype2,
+ comp8
],
headerParameters: [
version,
- transactionalContentMD5,
- transactionalContentCrc64,
requestId,
- ifTags
+ deletedContainerName,
+ deletedContainerVersion
],
- requestBody: {
- parameterPath: [
- "options",
- "tags"
- ],
- mapper: BlobTags
- },
- contentType: "application/xml; charset=utf-8",
responses: {
- 204: {
- headersMapper: BlobSetTagsHeaders
+ 201: {
+ headersMapper: ContainerRestoreHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlobSetTagsHeaders
+ headersMapper: ContainerRestoreHeaders
}
},
isXML: true,
- serializer: serializer$2
+ serializer: serializer$1
};
-
-/*
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for license information.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
- */
-
-var Mappers$3 = /*#__PURE__*/Object.freeze({
- __proto__: null,
- ClearRange: ClearRange,
- PageBlobClearPagesHeaders: PageBlobClearPagesHeaders,
- PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders,
- PageBlobCreateHeaders: PageBlobCreateHeaders,
- PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders,
- PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders,
- PageBlobResizeHeaders: PageBlobResizeHeaders,
- PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders,
- PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders,
- PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders,
- PageList: PageList,
- PageRange: PageRange,
- StorageError: StorageError
-});
-
-/*
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for
- * license information.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is
- * regenerated.
- */
-/** Class representing a PageBlob. */
-var PageBlob = /** @class */ (function () {
- /**
- * Create a PageBlob.
- * @param {StorageClientContext} client Reference to the service client.
- */
- function PageBlob(client) {
- this.client = client;
- }
- PageBlob.prototype.create = function (contentLength, blobContentLength, options, callback) {
- return this.client.sendOperationRequest({
- contentLength: contentLength,
- blobContentLength: blobContentLength,
- options: options
- }, createOperationSpec$1, callback);
- };
- PageBlob.prototype.uploadPages = function (body, contentLength, options, callback) {
- return this.client.sendOperationRequest({
- body: body,
- contentLength: contentLength,
- options: options
- }, uploadPagesOperationSpec, callback);
- };
- PageBlob.prototype.clearPages = function (contentLength, options, callback) {
- return this.client.sendOperationRequest({
- contentLength: contentLength,
- options: options
- }, clearPagesOperationSpec, callback);
- };
- PageBlob.prototype.uploadPagesFromURL = function (sourceUrl, sourceRange, contentLength, range, options, callback) {
- return this.client.sendOperationRequest({
- sourceUrl: sourceUrl,
- sourceRange: sourceRange,
- contentLength: contentLength,
- range: range,
- options: options
- }, uploadPagesFromURLOperationSpec, callback);
- };
- PageBlob.prototype.getPageRanges = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, getPageRangesOperationSpec, callback);
- };
- PageBlob.prototype.getPageRangesDiff = function (options, callback) {
- return this.client.sendOperationRequest({
- options: options
- }, getPageRangesDiffOperationSpec, callback);
- };
- PageBlob.prototype.resize = function (blobContentLength, options, callback) {
- return this.client.sendOperationRequest({
- blobContentLength: blobContentLength,
- options: options
- }, resizeOperationSpec, callback);
- };
- PageBlob.prototype.updateSequenceNumber = function (sequenceNumberAction, options, callback) {
- return this.client.sendOperationRequest({
- sequenceNumberAction: sequenceNumberAction,
- options: options
- }, updateSequenceNumberOperationSpec, callback);
- };
- PageBlob.prototype.copyIncremental = function (copySource, options, callback) {
- return this.client.sendOperationRequest({
- copySource: copySource,
- options: options
- }, copyIncrementalOperationSpec, callback);
- };
- return PageBlob;
-}());
-// Operation Specifications
-var serializer$3 = new coreHttp.Serializer(Mappers$3, true);
-var createOperationSpec$1 = {
+var acquireLeaseOperationSpec = {
httpMethod: "PUT",
- path: "{containerName}/{blob}",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
- timeoutInSeconds
+ timeoutInSeconds,
+ comp9,
+ restype2
],
headerParameters: [
- contentLength,
- tier0,
- metadata,
- encryptionScope,
- blobContentLength,
- blobSequenceNumber,
+ duration,
+ proposedLeaseId0,
version,
requestId,
- blobTagsString,
- blobType0,
- blobContentType,
- blobContentEncoding,
- blobContentLanguage,
- blobContentMD5,
- blobCacheControl,
- blobContentDisposition,
- leaseId0,
- encryptionKey,
- encryptionKeySha256,
- encryptionAlgorithm,
+ action0,
ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ ifUnmodifiedSince
],
responses: {
201: {
- headersMapper: PageBlobCreateHeaders
+ headersMapper: ContainerAcquireLeaseHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: PageBlobCreateHeaders
+ headersMapper: ContainerAcquireLeaseHeaders
}
},
isXML: true,
- serializer: serializer$3
+ serializer: serializer$1
};
-var uploadPagesOperationSpec = {
+var releaseLeaseOperationSpec = {
httpMethod: "PUT",
- path: "{containerName}/{blob}",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
timeoutInSeconds,
- comp16
+ comp9,
+ restype2
],
headerParameters: [
- contentLength,
- transactionalContentMD5,
- transactionalContentCrc64,
- range0,
- encryptionScope,
+ leaseId1,
version,
requestId,
- pageWrite0,
- leaseId0,
- encryptionKey,
- encryptionKeySha256,
- encryptionAlgorithm,
- ifSequenceNumberLessThanOrEqualTo,
- ifSequenceNumberLessThan,
- ifSequenceNumberEqualTo,
+ action1,
ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ ifUnmodifiedSince
],
- requestBody: {
- parameterPath: "body",
- mapper: {
- required: true,
- serializedName: "body",
- type: {
- name: "Stream"
- }
- }
- },
- contentType: "application/octet-stream",
responses: {
- 201: {
- headersMapper: PageBlobUploadPagesHeaders
+ 200: {
+ headersMapper: ContainerReleaseLeaseHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: PageBlobUploadPagesHeaders
+ headersMapper: ContainerReleaseLeaseHeaders
}
},
isXML: true,
- serializer: serializer$3
+ serializer: serializer$1
};
-var clearPagesOperationSpec = {
+var renewLeaseOperationSpec = {
httpMethod: "PUT",
- path: "{containerName}/{blob}",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
timeoutInSeconds,
- comp16
+ comp9,
+ restype2
],
headerParameters: [
- contentLength,
- range0,
- encryptionScope,
+ leaseId1,
version,
requestId,
- pageWrite1,
- leaseId0,
- encryptionKey,
- encryptionKeySha256,
- encryptionAlgorithm,
- ifSequenceNumberLessThanOrEqualTo,
- ifSequenceNumberLessThan,
- ifSequenceNumberEqualTo,
+ action2,
ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ ifUnmodifiedSince
],
responses: {
- 201: {
- headersMapper: PageBlobClearPagesHeaders
+ 200: {
+ headersMapper: ContainerRenewLeaseHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: PageBlobClearPagesHeaders
+ headersMapper: ContainerRenewLeaseHeaders
}
},
isXML: true,
- serializer: serializer$3
+ serializer: serializer$1
};
-var uploadPagesFromURLOperationSpec = {
+var breakLeaseOperationSpec = {
httpMethod: "PUT",
- path: "{containerName}/{blob}",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
timeoutInSeconds,
- comp16
+ comp9,
+ restype2
],
headerParameters: [
- sourceUrl,
- sourceRange0,
- sourceContentMD5,
- sourceContentCrc64,
- contentLength,
- range1,
- encryptionScope,
+ breakPeriod,
version,
requestId,
- pageWrite0,
- encryptionKey,
- encryptionKeySha256,
- encryptionAlgorithm,
- leaseId0,
- ifSequenceNumberLessThanOrEqualTo,
- ifSequenceNumberLessThan,
- ifSequenceNumberEqualTo,
+ action3,
ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags,
- sourceIfModifiedSince,
- sourceIfUnmodifiedSince,
- sourceIfMatch,
- sourceIfNoneMatch
+ ifUnmodifiedSince
],
responses: {
- 201: {
- headersMapper: PageBlobUploadPagesFromURLHeaders
+ 202: {
+ headersMapper: ContainerBreakLeaseHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: PageBlobUploadPagesFromURLHeaders
+ headersMapper: ContainerBreakLeaseHeaders
}
},
isXML: true,
- serializer: serializer$3
+ serializer: serializer$1
};
-var getPageRangesOperationSpec = {
- httpMethod: "GET",
- path: "{containerName}/{blob}",
+var changeLeaseOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
- snapshot,
timeoutInSeconds,
- comp17
+ comp9,
+ restype2
],
headerParameters: [
- range0,
+ leaseId1,
+ proposedLeaseId1,
version,
requestId,
- leaseId0,
+ action4,
ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ ifUnmodifiedSince
],
responses: {
200: {
- bodyMapper: PageList,
- headersMapper: PageBlobGetPageRangesHeaders
+ headersMapper: ContainerChangeLeaseHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: PageBlobGetPageRangesHeaders
+ headersMapper: ContainerChangeLeaseHeaders
}
},
isXML: true,
- serializer: serializer$3
+ serializer: serializer$1
};
-var getPageRangesDiffOperationSpec = {
+var listBlobFlatSegmentOperationSpec = {
httpMethod: "GET",
- path: "{containerName}/{blob}",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
- snapshot,
+ prefix,
+ marker0,
+ maxPageSize,
+ include1,
timeoutInSeconds,
- prevsnapshot,
- comp17
+ restype2,
+ comp2
],
headerParameters: [
- prevSnapshotUrl,
- range0,
version,
- requestId,
- leaseId0,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ requestId
],
responses: {
200: {
- bodyMapper: PageList,
- headersMapper: PageBlobGetPageRangesDiffHeaders
+ bodyMapper: ListBlobsFlatSegmentResponse,
+ headersMapper: ContainerListBlobFlatSegmentHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: PageBlobGetPageRangesDiffHeaders
+ headersMapper: ContainerListBlobFlatSegmentHeaders
}
},
isXML: true,
- serializer: serializer$3
+ serializer: serializer$1
};
-var resizeOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
+var listBlobHierarchySegmentOperationSpec = {
+ httpMethod: "GET",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
+ prefix,
+ delimiter,
+ marker0,
+ maxPageSize,
+ include1,
timeoutInSeconds,
- comp0
+ restype2,
+ comp2
],
headerParameters: [
- encryptionScope,
- blobContentLength,
version,
- requestId,
- leaseId0,
- encryptionKey,
- encryptionKeySha256,
- encryptionAlgorithm,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ requestId
],
responses: {
200: {
- headersMapper: PageBlobResizeHeaders
+ bodyMapper: ListBlobsHierarchySegmentResponse,
+ headersMapper: ContainerListBlobHierarchySegmentHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: PageBlobResizeHeaders
+ headersMapper: ContainerListBlobHierarchySegmentHeaders
}
},
isXML: true,
- serializer: serializer$3
+ serializer: serializer$1
};
-var updateSequenceNumberOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
+var getAccountInfoOperationSpec$1 = {
+ httpMethod: "GET",
+ path: "{containerName}",
urlParameters: [
url
],
queryParameters: [
- timeoutInSeconds,
+ restype1,
comp0
],
headerParameters: [
- sequenceNumberAction,
- blobSequenceNumber,
- version,
- requestId,
- leaseId0,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
+ version
],
responses: {
200: {
- headersMapper: PageBlobUpdateSequenceNumberHeaders
- },
- default: {
- bodyMapper: StorageError,
- headersMapper: PageBlobUpdateSequenceNumberHeaders
- }
- },
- isXML: true,
- serializer: serializer$3
-};
-var copyIncrementalOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
- urlParameters: [
- url
- ],
- queryParameters: [
- timeoutInSeconds,
- comp18
- ],
- headerParameters: [
- copySource,
- version,
- requestId,
- ifModifiedSince,
- ifUnmodifiedSince,
- ifMatch,
- ifNoneMatch,
- ifTags
- ],
- responses: {
- 202: {
- headersMapper: PageBlobCopyIncrementalHeaders
+ headersMapper: ContainerGetAccountInfoHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: PageBlobCopyIncrementalHeaders
+ headersMapper: ContainerGetAccountInfoHeaders
}
},
isXML: true,
- serializer: serializer$3
+ serializer: serializer$1
};
/*
@@ -41651,85 +41702,213 @@ var copyIncrementalOperationSpec = {
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
-var Mappers$4 = /*#__PURE__*/Object.freeze({
+var Mappers$2 = /*#__PURE__*/Object.freeze({
__proto__: null,
- AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders,
- AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders,
- AppendBlobCreateHeaders: AppendBlobCreateHeaders,
- AppendBlobSealHeaders: AppendBlobSealHeaders,
- StorageError: StorageError
-});
-
-/*
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for
- * license information.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is
- * regenerated.
- */
-/** Class representing a AppendBlob. */
-var AppendBlob = /** @class */ (function () {
- /**
- * Create a AppendBlob.
- * @param {StorageClientContext} client Reference to the service client.
- */
- function AppendBlob(client) {
- this.client = client;
- }
- AppendBlob.prototype.create = function (contentLength, options, callback) {
- return this.client.sendOperationRequest({
- contentLength: contentLength,
- options: options
- }, createOperationSpec$2, callback);
- };
- AppendBlob.prototype.appendBlock = function (body, contentLength, options, callback) {
+ BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders,
+ BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders,
+ BlobBreakLeaseHeaders: BlobBreakLeaseHeaders,
+ BlobChangeLeaseHeaders: BlobChangeLeaseHeaders,
+ BlobCopyFromURLHeaders: BlobCopyFromURLHeaders,
+ BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders,
+ BlobDeleteHeaders: BlobDeleteHeaders,
+ BlobDownloadHeaders: BlobDownloadHeaders,
+ BlobGetAccessControlHeaders: BlobGetAccessControlHeaders,
+ BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders,
+ BlobGetPropertiesHeaders: BlobGetPropertiesHeaders,
+ BlobGetTagsHeaders: BlobGetTagsHeaders,
+ BlobQueryHeaders: BlobQueryHeaders,
+ BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders,
+ BlobRenameHeaders: BlobRenameHeaders,
+ BlobRenewLeaseHeaders: BlobRenewLeaseHeaders,
+ BlobSetAccessControlHeaders: BlobSetAccessControlHeaders,
+ BlobSetExpiryHeaders: BlobSetExpiryHeaders,
+ BlobSetHTTPHeadersHeaders: BlobSetHTTPHeadersHeaders,
+ BlobSetMetadataHeaders: BlobSetMetadataHeaders,
+ BlobSetTagsHeaders: BlobSetTagsHeaders,
+ BlobSetTierHeaders: BlobSetTierHeaders,
+ BlobStartCopyFromURLHeaders: BlobStartCopyFromURLHeaders,
+ BlobTag: BlobTag,
+ BlobTags: BlobTags,
+ BlobUndeleteHeaders: BlobUndeleteHeaders,
+ DataLakeStorageError: DataLakeStorageError,
+ DataLakeStorageErrorError: DataLakeStorageErrorError,
+ DelimitedTextConfiguration: DelimitedTextConfiguration,
+ JsonTextConfiguration: JsonTextConfiguration,
+ QueryFormat: QueryFormat,
+ QueryRequest: QueryRequest,
+ QuerySerialization: QuerySerialization,
+ StorageError: StorageError
+});
+
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for
+ * license information.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is
+ * regenerated.
+ */
+/** Class representing a Blob. */
+var Blob$1 = /** @class */ (function () {
+ /**
+ * Create a Blob.
+ * @param {StorageClientContext} client Reference to the service client.
+ */
+ function Blob(client) {
+ this.client = client;
+ }
+ Blob.prototype.download = function (options, callback) {
return this.client.sendOperationRequest({
- body: body,
- contentLength: contentLength,
options: options
- }, appendBlockOperationSpec, callback);
+ }, downloadOperationSpec, callback);
};
- AppendBlob.prototype.appendBlockFromUrl = function (sourceUrl, contentLength, options, callback) {
+ Blob.prototype.getProperties = function (options, callback) {
return this.client.sendOperationRequest({
- sourceUrl: sourceUrl,
- contentLength: contentLength,
options: options
- }, appendBlockFromUrlOperationSpec, callback);
+ }, getPropertiesOperationSpec$2, callback);
};
- AppendBlob.prototype.seal = function (options, callback) {
+ Blob.prototype.deleteMethod = function (options, callback) {
return this.client.sendOperationRequest({
options: options
- }, sealOperationSpec, callback);
+ }, deleteMethodOperationSpec$1, callback);
};
- return AppendBlob;
+ Blob.prototype.setAccessControl = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, setAccessControlOperationSpec, callback);
+ };
+ Blob.prototype.getAccessControl = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, getAccessControlOperationSpec, callback);
+ };
+ Blob.prototype.rename = function (renameSource, options, callback) {
+ return this.client.sendOperationRequest({
+ renameSource: renameSource,
+ options: options
+ }, renameOperationSpec, callback);
+ };
+ Blob.prototype.undelete = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, undeleteOperationSpec, callback);
+ };
+ Blob.prototype.setExpiry = function (expiryOptions, options, callback) {
+ return this.client.sendOperationRequest({
+ expiryOptions: expiryOptions,
+ options: options
+ }, setExpiryOperationSpec, callback);
+ };
+ Blob.prototype.setHTTPHeaders = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, setHTTPHeadersOperationSpec, callback);
+ };
+ Blob.prototype.setMetadata = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, setMetadataOperationSpec$1, callback);
+ };
+ Blob.prototype.acquireLease = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, acquireLeaseOperationSpec$1, callback);
+ };
+ Blob.prototype.releaseLease = function (leaseId, options, callback) {
+ return this.client.sendOperationRequest({
+ leaseId: leaseId,
+ options: options
+ }, releaseLeaseOperationSpec$1, callback);
+ };
+ Blob.prototype.renewLease = function (leaseId, options, callback) {
+ return this.client.sendOperationRequest({
+ leaseId: leaseId,
+ options: options
+ }, renewLeaseOperationSpec$1, callback);
+ };
+ Blob.prototype.changeLease = function (leaseId, proposedLeaseId, options, callback) {
+ return this.client.sendOperationRequest({
+ leaseId: leaseId,
+ proposedLeaseId: proposedLeaseId,
+ options: options
+ }, changeLeaseOperationSpec$1, callback);
+ };
+ Blob.prototype.breakLease = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, breakLeaseOperationSpec$1, callback);
+ };
+ Blob.prototype.createSnapshot = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, createSnapshotOperationSpec, callback);
+ };
+ Blob.prototype.startCopyFromURL = function (copySource, options, callback) {
+ return this.client.sendOperationRequest({
+ copySource: copySource,
+ options: options
+ }, startCopyFromURLOperationSpec, callback);
+ };
+ Blob.prototype.copyFromURL = function (copySource, options, callback) {
+ return this.client.sendOperationRequest({
+ copySource: copySource,
+ options: options
+ }, copyFromURLOperationSpec, callback);
+ };
+ Blob.prototype.abortCopyFromURL = function (copyId, options, callback) {
+ return this.client.sendOperationRequest({
+ copyId: copyId,
+ options: options
+ }, abortCopyFromURLOperationSpec, callback);
+ };
+ Blob.prototype.setTier = function (tier, options, callback) {
+ return this.client.sendOperationRequest({
+ tier: tier,
+ options: options
+ }, setTierOperationSpec, callback);
+ };
+ Blob.prototype.getAccountInfo = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, getAccountInfoOperationSpec$2, callback);
+ };
+ Blob.prototype.query = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, queryOperationSpec, callback);
+ };
+ Blob.prototype.getTags = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, getTagsOperationSpec, callback);
+ };
+ Blob.prototype.setTags = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, setTagsOperationSpec, callback);
+ };
+ return Blob;
}());
// Operation Specifications
-var serializer$4 = new coreHttp.Serializer(Mappers$4, true);
-var createOperationSpec$2 = {
- httpMethod: "PUT",
+var serializer$2 = new coreHttp.Serializer(Mappers$2, true);
+var downloadOperationSpec = {
+ httpMethod: "GET",
path: "{containerName}/{blob}",
urlParameters: [
url
],
queryParameters: [
+ snapshot,
+ versionId,
timeoutInSeconds
],
headerParameters: [
- contentLength,
- metadata,
- encryptionScope,
+ range0,
+ rangeGetContentMD5,
+ rangeGetContentCRC64,
version,
requestId,
- blobTagsString,
- blobType1,
- blobContentType,
- blobContentEncoding,
- blobContentLanguage,
- blobContentMD5,
- blobCacheControl,
- blobContentDisposition,
leaseId0,
encryptionKey,
encryptionKeySha256,
@@ -41741,37 +41920,47 @@ var createOperationSpec$2 = {
ifTags
],
responses: {
- 201: {
- headersMapper: AppendBlobCreateHeaders
+ 200: {
+ bodyMapper: {
+ serializedName: "parsedResponse",
+ type: {
+ name: "Stream"
+ }
+ },
+ headersMapper: BlobDownloadHeaders
+ },
+ 206: {
+ bodyMapper: {
+ serializedName: "parsedResponse",
+ type: {
+ name: "Stream"
+ }
+ },
+ headersMapper: BlobDownloadHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: AppendBlobCreateHeaders
+ headersMapper: BlobDownloadHeaders
}
},
isXML: true,
- serializer: serializer$4
+ serializer: serializer$2
};
-var appendBlockOperationSpec = {
- httpMethod: "PUT",
+var getPropertiesOperationSpec$2 = {
+ httpMethod: "HEAD",
path: "{containerName}/{blob}",
urlParameters: [
url
],
queryParameters: [
- timeoutInSeconds,
- comp19
+ snapshot,
+ versionId,
+ timeoutInSeconds
],
headerParameters: [
- contentLength,
- transactionalContentMD5,
- transactionalContentCrc64,
- encryptionScope,
version,
requestId,
leaseId0,
- maxSize,
- appendPosition,
encryptionKey,
encryptionKeySha256,
encryptionAlgorithm,
@@ -41781,334 +41970,219 @@ var appendBlockOperationSpec = {
ifNoneMatch,
ifTags
],
- requestBody: {
- parameterPath: "body",
- mapper: {
- required: true,
- serializedName: "body",
- type: {
- name: "Stream"
- }
- }
- },
- contentType: "application/octet-stream",
responses: {
- 201: {
- headersMapper: AppendBlobAppendBlockHeaders
+ 200: {
+ headersMapper: BlobGetPropertiesHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: AppendBlobAppendBlockHeaders
+ headersMapper: BlobGetPropertiesHeaders
}
},
isXML: true,
- serializer: serializer$4
+ serializer: serializer$2
};
-var appendBlockFromUrlOperationSpec = {
- httpMethod: "PUT",
+var deleteMethodOperationSpec$1 = {
+ httpMethod: "DELETE",
path: "{containerName}/{blob}",
urlParameters: [
url
],
queryParameters: [
- timeoutInSeconds,
- comp19
+ snapshot,
+ versionId,
+ timeoutInSeconds
],
headerParameters: [
- sourceUrl,
- sourceRange1,
- sourceContentMD5,
- sourceContentCrc64,
- contentLength,
- transactionalContentMD5,
- encryptionScope,
+ deleteSnapshots,
version,
requestId,
- encryptionKey,
- encryptionKeySha256,
- encryptionAlgorithm,
leaseId0,
- maxSize,
- appendPosition,
ifModifiedSince,
ifUnmodifiedSince,
ifMatch,
ifNoneMatch,
- ifTags,
- sourceIfModifiedSince,
- sourceIfUnmodifiedSince,
- sourceIfMatch,
- sourceIfNoneMatch
+ ifTags
],
responses: {
- 201: {
- headersMapper: AppendBlobAppendBlockFromUrlHeaders
+ 202: {
+ headersMapper: BlobDeleteHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: AppendBlobAppendBlockFromUrlHeaders
+ headersMapper: BlobDeleteHeaders
}
},
isXML: true,
- serializer: serializer$4
+ serializer: serializer$2
};
-var sealOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
+var setAccessControlOperationSpec = {
+ httpMethod: "PATCH",
+ path: "{filesystem}/{path}",
urlParameters: [
url
],
queryParameters: [
timeoutInSeconds,
- comp20
+ action5
],
headerParameters: [
- version,
+ owner,
+ group,
+ posixPermissions,
+ posixAcl,
requestId,
+ version,
leaseId0,
- ifModifiedSince,
- ifUnmodifiedSince,
ifMatch,
ifNoneMatch,
- appendPosition
+ ifModifiedSince,
+ ifUnmodifiedSince
],
responses: {
200: {
- headersMapper: AppendBlobSealHeaders
+ headersMapper: BlobSetAccessControlHeaders
},
default: {
- bodyMapper: StorageError,
- headersMapper: AppendBlobSealHeaders
+ bodyMapper: DataLakeStorageError,
+ headersMapper: BlobSetAccessControlHeaders
}
},
isXML: true,
- serializer: serializer$4
+ serializer: serializer$2
};
-
-/*
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for license information.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is regenerated.
- */
-
-var Mappers$5 = /*#__PURE__*/Object.freeze({
- __proto__: null,
- Block: Block,
- BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders,
- BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders,
- BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders,
- BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders,
- BlockBlobUploadHeaders: BlockBlobUploadHeaders,
- BlockList: BlockList,
- BlockLookupList: BlockLookupList,
- StorageError: StorageError
-});
-
-/*
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for
- * license information.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is
- * regenerated.
- */
-/** Class representing a BlockBlob. */
-var BlockBlob = /** @class */ (function () {
- /**
- * Create a BlockBlob.
- * @param {StorageClientContext} client Reference to the service client.
- */
- function BlockBlob(client) {
- this.client = client;
- }
- BlockBlob.prototype.upload = function (body, contentLength, options, callback) {
- return this.client.sendOperationRequest({
- body: body,
- contentLength: contentLength,
- options: options
- }, uploadOperationSpec, callback);
- };
- BlockBlob.prototype.stageBlock = function (blockId, contentLength, body, options, callback) {
- return this.client.sendOperationRequest({
- blockId: blockId,
- contentLength: contentLength,
- body: body,
- options: options
- }, stageBlockOperationSpec, callback);
- };
- BlockBlob.prototype.stageBlockFromURL = function (blockId, contentLength, sourceUrl, options, callback) {
- return this.client.sendOperationRequest({
- blockId: blockId,
- contentLength: contentLength,
- sourceUrl: sourceUrl,
- options: options
- }, stageBlockFromURLOperationSpec, callback);
- };
- BlockBlob.prototype.commitBlockList = function (blocks, options, callback) {
- return this.client.sendOperationRequest({
- blocks: blocks,
- options: options
- }, commitBlockListOperationSpec, callback);
- };
- BlockBlob.prototype.getBlockList = function (listType, options, callback) {
- return this.client.sendOperationRequest({
- listType: listType,
- options: options
- }, getBlockListOperationSpec, callback);
- };
- return BlockBlob;
-}());
-// Operation Specifications
-var serializer$5 = new coreHttp.Serializer(Mappers$5, true);
-var uploadOperationSpec = {
- httpMethod: "PUT",
- path: "{containerName}/{blob}",
+var getAccessControlOperationSpec = {
+ httpMethod: "HEAD",
+ path: "{filesystem}/{path}",
urlParameters: [
url
],
queryParameters: [
- timeoutInSeconds
+ timeoutInSeconds,
+ upn,
+ action6
],
headerParameters: [
- transactionalContentMD5,
- contentLength,
- metadata,
- encryptionScope,
- tier0,
- version,
requestId,
- blobTagsString,
- blobType2,
- blobContentType,
- blobContentEncoding,
- blobContentLanguage,
- blobContentMD5,
- blobCacheControl,
- blobContentDisposition,
+ version,
leaseId0,
- encryptionKey,
- encryptionKeySha256,
- encryptionAlgorithm,
- ifModifiedSince,
- ifUnmodifiedSince,
ifMatch,
ifNoneMatch,
- ifTags
+ ifModifiedSince,
+ ifUnmodifiedSince
],
- requestBody: {
- parameterPath: "body",
- mapper: {
- required: true,
- serializedName: "body",
- type: {
- name: "Stream"
- }
- }
- },
- contentType: "application/octet-stream",
responses: {
- 201: {
- headersMapper: BlockBlobUploadHeaders
+ 200: {
+ headersMapper: BlobGetAccessControlHeaders
},
default: {
- bodyMapper: StorageError,
- headersMapper: BlockBlobUploadHeaders
+ bodyMapper: DataLakeStorageError,
+ headersMapper: BlobGetAccessControlHeaders
}
},
isXML: true,
- serializer: serializer$5
+ serializer: serializer$2
};
-var stageBlockOperationSpec = {
+var renameOperationSpec = {
httpMethod: "PUT",
- path: "{containerName}/{blob}",
+ path: "{filesystem}/{path}",
urlParameters: [
url
],
queryParameters: [
- blockId,
timeoutInSeconds,
- comp21
+ pathRenameMode
],
headerParameters: [
- contentLength,
- transactionalContentMD5,
- transactionalContentCrc64,
- encryptionScope,
+ renameSource,
+ directoryProperties,
+ posixPermissions,
+ posixUmask,
+ sourceLeaseId,
version,
requestId,
+ cacheControl,
+ contentType,
+ contentEncoding,
+ contentLanguage,
+ contentDisposition,
leaseId0,
- encryptionKey,
- encryptionKeySha256,
- encryptionAlgorithm
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ sourceIfModifiedSince,
+ sourceIfUnmodifiedSince,
+ sourceIfMatch,
+ sourceIfNoneMatch
],
- requestBody: {
- parameterPath: "body",
- mapper: {
- required: true,
- serializedName: "body",
- type: {
- name: "Stream"
- }
- }
- },
- contentType: "application/octet-stream",
responses: {
201: {
- headersMapper: BlockBlobStageBlockHeaders
+ headersMapper: BlobRenameHeaders
},
default: {
- bodyMapper: StorageError,
- headersMapper: BlockBlobStageBlockHeaders
+ bodyMapper: DataLakeStorageError,
+ headersMapper: BlobRenameHeaders
}
},
isXML: true,
- serializer: serializer$5
+ serializer: serializer$2
};
-var stageBlockFromURLOperationSpec = {
+var undeleteOperationSpec = {
httpMethod: "PUT",
path: "{containerName}/{blob}",
urlParameters: [
url
],
queryParameters: [
- blockId,
timeoutInSeconds,
- comp21
+ comp8
+ ],
+ headerParameters: [
+ version,
+ requestId
+ ],
+ responses: {
+ 200: {
+ headersMapper: BlobUndeleteHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobUndeleteHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$2
+};
+var setExpiryOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp10
],
headerParameters: [
- contentLength,
- sourceUrl,
- sourceRange1,
- sourceContentMD5,
- sourceContentCrc64,
- encryptionScope,
version,
requestId,
- encryptionKey,
- encryptionKeySha256,
- encryptionAlgorithm,
- leaseId0,
- sourceIfModifiedSince,
- sourceIfUnmodifiedSince,
- sourceIfMatch,
- sourceIfNoneMatch
+ expiryOptions,
+ expiresOn
],
responses: {
- 201: {
- headersMapper: BlockBlobStageBlockFromURLHeaders
+ 200: {
+ headersMapper: BlobSetExpiryHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlockBlobStageBlockFromURLHeaders
+ headersMapper: BlobSetExpiryHeaders
}
},
isXML: true,
- serializer: serializer$5
+ serializer: serializer$2
};
-var commitBlockListOperationSpec = {
+var setHTTPHeadersOperationSpec = {
httpMethod: "PUT",
path: "{containerName}/{blob}",
urlParameters: [
@@ -42116,24 +42190,52 @@ var commitBlockListOperationSpec = {
],
queryParameters: [
timeoutInSeconds,
- comp22
+ comp0
],
headerParameters: [
- transactionalContentMD5,
- transactionalContentCrc64,
- metadata,
- encryptionScope,
- tier0,
version,
requestId,
- blobTagsString,
blobCacheControl,
blobContentType,
+ blobContentMD5,
blobContentEncoding,
blobContentLanguage,
- blobContentMD5,
blobContentDisposition,
leaseId0,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ responses: {
+ 200: {
+ headersMapper: BlobSetHTTPHeadersHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobSetHTTPHeadersHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$2
+};
+var setMetadataOperationSpec$1 = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp6
+ ],
+ headerParameters: [
+ metadata,
+ encryptionScope,
+ version,
+ requestId,
+ leaseId0,
encryptionKey,
encryptionKeySha256,
encryptionAlgorithm,
@@ -42143,3320 +42245,3147 @@ var commitBlockListOperationSpec = {
ifNoneMatch,
ifTags
],
- requestBody: {
- parameterPath: "blocks",
- mapper: tslib.__assign(tslib.__assign({}, BlockLookupList), { required: true })
+ responses: {
+ 200: {
+ headersMapper: BlobSetMetadataHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobSetMetadataHeaders
+ }
},
- contentType: "application/xml; charset=utf-8",
+ isXML: true,
+ serializer: serializer$2
+};
+var acquireLeaseOperationSpec$1 = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp9
+ ],
+ headerParameters: [
+ duration,
+ proposedLeaseId0,
+ version,
+ requestId,
+ action0,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
responses: {
201: {
- headersMapper: BlockBlobCommitBlockListHeaders
+ headersMapper: BlobAcquireLeaseHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlockBlobCommitBlockListHeaders
+ headersMapper: BlobAcquireLeaseHeaders
}
},
isXML: true,
- serializer: serializer$5
+ serializer: serializer$2
};
-var getBlockListOperationSpec = {
- httpMethod: "GET",
+var releaseLeaseOperationSpec$1 = {
+ httpMethod: "PUT",
path: "{containerName}/{blob}",
urlParameters: [
url
],
queryParameters: [
- snapshot,
- listType,
timeoutInSeconds,
- comp22
+ comp9
],
headerParameters: [
+ leaseId1,
version,
requestId,
- leaseId0,
+ action1,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
ifTags
],
responses: {
200: {
- bodyMapper: BlockList,
- headersMapper: BlockBlobGetBlockListHeaders
+ headersMapper: BlobReleaseLeaseHeaders
},
default: {
bodyMapper: StorageError,
- headersMapper: BlockBlobGetBlockListHeaders
+ headersMapper: BlobReleaseLeaseHeaders
}
},
isXML: true,
- serializer: serializer$5
+ serializer: serializer$2
};
-
-// Copyright (c) Microsoft Corporation.
-/**
- * The @azure/logger configuration for this package.
- */
-var logger = logger$1.createClientLogger("storage-blob");
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-var SDK_VERSION = "12.2.1";
-var SERVICE_VERSION = "2019-12-12";
-var BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB
-var BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB
-var BLOCK_BLOB_MAX_BLOCKS = 50000;
-var DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB
-var DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB
-var DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5;
-/**
- * The OAuth scope to use with Azure Storage.
- */
-var StorageOAuthScopes = "https://storage.azure.com/.default";
-var URLConstants = {
- Parameters: {
- FORCE_BROWSER_NO_CACHE: "_",
- SIGNATURE: "sig",
- SNAPSHOT: "snapshot",
- VERSIONID: "versionid",
- TIMEOUT: "timeout"
- }
+var renewLeaseOperationSpec$1 = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp9
+ ],
+ headerParameters: [
+ leaseId1,
+ version,
+ requestId,
+ action2,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ responses: {
+ 200: {
+ headersMapper: BlobRenewLeaseHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobRenewLeaseHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$2
};
-var HTTPURLConnection = {
- HTTP_ACCEPTED: 202,
- HTTP_CONFLICT: 409,
- HTTP_NOT_FOUND: 404,
- HTTP_PRECON_FAILED: 412,
- HTTP_RANGE_NOT_SATISFIABLE: 416
+var changeLeaseOperationSpec$1 = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp9
+ ],
+ headerParameters: [
+ leaseId1,
+ proposedLeaseId1,
+ version,
+ requestId,
+ action4,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ responses: {
+ 200: {
+ headersMapper: BlobChangeLeaseHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobChangeLeaseHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$2
};
-var HeaderConstants = {
- AUTHORIZATION: "Authorization",
- AUTHORIZATION_SCHEME: "Bearer",
- CONTENT_ENCODING: "Content-Encoding",
- CONTENT_ID: "Content-ID",
- CONTENT_LANGUAGE: "Content-Language",
- CONTENT_LENGTH: "Content-Length",
- CONTENT_MD5: "Content-Md5",
- CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding",
- CONTENT_TYPE: "Content-Type",
- COOKIE: "Cookie",
- DATE: "date",
- IF_MATCH: "if-match",
- IF_MODIFIED_SINCE: "if-modified-since",
- IF_NONE_MATCH: "if-none-match",
- IF_UNMODIFIED_SINCE: "if-unmodified-since",
- PREFIX_FOR_STORAGE: "x-ms-",
- RANGE: "Range",
- USER_AGENT: "User-Agent",
- X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id",
- X_MS_COPY_SOURCE: "x-ms-copy-source",
- X_MS_DATE: "x-ms-date",
- X_MS_ERROR_CODE: "x-ms-error-code",
- X_MS_VERSION: "x-ms-version"
+var breakLeaseOperationSpec$1 = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp9
+ ],
+ headerParameters: [
+ breakPeriod,
+ version,
+ requestId,
+ action3,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ responses: {
+ 202: {
+ headersMapper: BlobBreakLeaseHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobBreakLeaseHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$2
};
-var ETagNone = "";
-var ETagAny = "*";
-var SIZE_1_MB = 1 * 1024 * 1024;
-var BATCH_MAX_REQUEST = 256;
-var BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB;
-var HTTP_LINE_ENDING = "\r\n";
-var HTTP_VERSION_1_1 = "HTTP/1.1";
-var EncryptionAlgorithmAES25 = "AES256";
-var DevelopmentConnectionString = "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;";
-var StorageBlobLoggingAllowedHeaderNames = [
- "Access-Control-Allow-Origin",
- "Cache-Control",
- "Content-Length",
- "Content-Type",
- "Date",
- "Request-Id",
- "traceparent",
- "Transfer-Encoding",
- "User-Agent",
- "x-ms-client-request-id",
- "x-ms-date",
- "x-ms-error-code",
- "x-ms-request-id",
- "x-ms-return-client-request-id",
- "x-ms-version",
- "Accept-Ranges",
- "Content-Disposition",
- "Content-Encoding",
- "Content-Language",
- "Content-MD5",
- "Content-Range",
- "ETag",
- "Last-Modified",
- "Server",
- "Vary",
- "x-ms-content-crc64",
- "x-ms-copy-action",
- "x-ms-copy-completion-time",
- "x-ms-copy-id",
- "x-ms-copy-progress",
- "x-ms-copy-status",
- "x-ms-has-immutability-policy",
- "x-ms-has-legal-hold",
- "x-ms-lease-state",
- "x-ms-lease-status",
- "x-ms-range",
- "x-ms-request-server-encrypted",
- "x-ms-server-encrypted",
- "x-ms-snapshot",
- "x-ms-source-range",
- "If-Match",
- "If-Modified-Since",
- "If-None-Match",
- "If-Unmodified-Since",
- "x-ms-access-tier",
- "x-ms-access-tier-change-time",
- "x-ms-access-tier-inferred",
- "x-ms-account-kind",
- "x-ms-archive-status",
- "x-ms-blob-append-offset",
- "x-ms-blob-cache-control",
- "x-ms-blob-committed-block-count",
- "x-ms-blob-condition-appendpos",
- "x-ms-blob-condition-maxsize",
- "x-ms-blob-content-disposition",
- "x-ms-blob-content-encoding",
- "x-ms-blob-content-language",
- "x-ms-blob-content-length",
- "x-ms-blob-content-md5",
- "x-ms-blob-content-type",
- "x-ms-blob-public-access",
- "x-ms-blob-sequence-number",
- "x-ms-blob-type",
- "x-ms-copy-destination-snapshot",
- "x-ms-creation-time",
- "x-ms-default-encryption-scope",
- "x-ms-delete-snapshots",
- "x-ms-delete-type-permanent",
- "x-ms-deny-encryption-scope-override",
- "x-ms-encryption-algorithm",
- "x-ms-if-sequence-number-eq",
- "x-ms-if-sequence-number-le",
- "x-ms-if-sequence-number-lt",
- "x-ms-incremental-copy",
- "x-ms-lease-action",
- "x-ms-lease-break-period",
- "x-ms-lease-duration",
- "x-ms-lease-id",
- "x-ms-lease-time",
- "x-ms-page-write",
- "x-ms-proposed-lease-id",
- "x-ms-range-get-content-md5",
- "x-ms-rehydrate-priority",
- "x-ms-sequence-number-action",
- "x-ms-sku-name",
- "x-ms-source-content-md5",
- "x-ms-source-if-match",
- "x-ms-source-if-modified-since",
- "x-ms-source-if-none-match",
- "x-ms-source-if-unmodified-since",
- "x-ms-tag-count",
- "x-ms-encryption-key-sha256",
- "x-ms-if-tags",
- "x-ms-source-if-tags"
-];
-var StorageBlobLoggingAllowedQueryParameters = [
- "comp",
- "maxresults",
- "rscc",
- "rscd",
- "rsce",
- "rscl",
- "rsct",
- "se",
- "si",
- "sip",
- "sp",
- "spr",
- "sr",
- "srt",
- "ss",
- "st",
- "sv",
- "include",
- "marker",
- "prefix",
- "copyid",
- "restype",
- "blockid",
- "blocklisttype",
- "delimiter",
- "prevsnapshot",
- "ske",
- "skoid",
- "sks",
- "skt",
- "sktid",
- "skv",
- "snapshot"
-];
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * Reserved URL characters must be properly escaped for Storage services like Blob or File.
- *
- * ## URL encode and escape strategy for JS SDKs
- *
- * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not.
- * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL
- * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors.
- *
- * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK.
- *
- * This is what legacy V2 SDK does, simple and works for most of the cases.
- * - When customer URL string is "http://account.blob.core.windows.net/con/b:",
- * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created.
- * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A",
- * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created.
- *
- * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is
- * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name.
- * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created.
- * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it.
- * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two:
- *
- * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters.
- *
- * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped.
- * - When customer URL string is "http://account.blob.core.windows.net/con/b:",
- * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created.
- * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A",
- * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created.
- * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A",
- * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created.
- *
- * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string
- * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL.
- * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample.
- * And following URL strings are invalid:
- * - "http://account.blob.core.windows.net/con/b%"
- * - "http://account.blob.core.windows.net/con/b%2"
- * - "http://account.blob.core.windows.net/con/b%G"
- *
- * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string.
- *
- * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)`
- *
- * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL.
- *
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata
- *
- * @export
- * @param {string} url
- * @returns {string}
- */
-function escapeURLPath(url) {
- var urlParsed = coreHttp.URLBuilder.parse(url);
- var path = urlParsed.getPath();
- path = path || "/";
- path = escape(path);
- urlParsed.setPath(path);
- return urlParsed.toString();
-}
-function getProxyUriFromDevConnString(connectionString) {
- // Development Connection String
- // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key
- var proxyUri = "";
- if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) {
- // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri
- var matchCredentials = connectionString.split(";");
- for (var _i = 0, matchCredentials_1 = matchCredentials; _i < matchCredentials_1.length; _i++) {
- var element = matchCredentials_1[_i];
- if (element.trim().startsWith("DevelopmentStorageProxyUri=")) {
- proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1];
- }
+var createSnapshotOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp11
+ ],
+ headerParameters: [
+ metadata,
+ encryptionScope,
+ version,
+ requestId,
+ encryptionKey,
+ encryptionKeySha256,
+ encryptionAlgorithm,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags,
+ leaseId0
+ ],
+ responses: {
+ 201: {
+ headersMapper: BlobCreateSnapshotHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobCreateSnapshotHeaders
}
- }
- return proxyUri;
-}
-function getValueInConnString(connectionString, argument) {
- var elements = connectionString.split(";");
- for (var _i = 0, elements_1 = elements; _i < elements_1.length; _i++) {
- var element = elements_1[_i];
- if (element.trim().startsWith(argument)) {
- return element.trim().match(argument + "=(.*)")[1];
+ },
+ isXML: true,
+ serializer: serializer$2
+};
+var startCopyFromURLOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds
+ ],
+ headerParameters: [
+ metadata,
+ tier0,
+ rehydratePriority,
+ copySource,
+ version,
+ requestId,
+ blobTagsString,
+ sealBlob,
+ sourceIfModifiedSince,
+ sourceIfUnmodifiedSince,
+ sourceIfMatch,
+ sourceIfNoneMatch,
+ sourceIfTags,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags,
+ leaseId0
+ ],
+ responses: {
+ 202: {
+ headersMapper: BlobStartCopyFromURLHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobStartCopyFromURLHeaders
}
- }
- return "";
-}
-/**
- * Extracts the parts of an Azure Storage account connection string.
- *
- * @export
- * @param {string} connectionString Connection string.
- * @returns {ConnectionString} String key value pairs of the storage account's url and credentials.
- */
-function extractConnectionStringParts(connectionString) {
- var proxyUri = "";
- if (connectionString.startsWith("UseDevelopmentStorage=true")) {
- // Development connection string
- proxyUri = getProxyUriFromDevConnString(connectionString);
- connectionString = DevelopmentConnectionString;
- }
- // Matching BlobEndpoint in the Account connection string
- var blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint");
- // Slicing off '/' at the end if exists
- // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end)
- blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint;
- if (connectionString.search("DefaultEndpointsProtocol=") !== -1 &&
- connectionString.search("AccountKey=") !== -1) {
- // Account connection string
- var defaultEndpointsProtocol = "";
- var accountName = "";
- var accountKey = Buffer.from("accountKey", "base64");
- var endpointSuffix = "";
- // Get account name and key
- accountName = getValueInConnString(connectionString, "AccountName");
- accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64");
- if (!blobEndpoint) {
- // BlobEndpoint is not present in the Account connection string
- // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`
- defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol");
- var protocol = defaultEndpointsProtocol.toLowerCase();
- if (protocol !== "https" && protocol !== "http") {
- throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'");
- }
- endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix");
- if (!endpointSuffix) {
- throw new Error("Invalid EndpointSuffix in the provided Connection String");
- }
- blobEndpoint = defaultEndpointsProtocol + "://" + accountName + ".blob." + endpointSuffix;
+ },
+ isXML: true,
+ serializer: serializer$2
+};
+var copyFromURLOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds
+ ],
+ headerParameters: [
+ metadata,
+ tier0,
+ copySource,
+ version,
+ requestId,
+ sourceContentMD5,
+ blobTagsString,
+ xMsRequiresSync,
+ sourceIfModifiedSince,
+ sourceIfUnmodifiedSince,
+ sourceIfMatch,
+ sourceIfNoneMatch,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags,
+ leaseId0
+ ],
+ responses: {
+ 202: {
+ headersMapper: BlobCopyFromURLHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobCopyFromURLHeaders
}
- if (!accountName) {
- throw new Error("Invalid AccountName in the provided Connection String");
+ },
+ isXML: true,
+ serializer: serializer$2
+};
+var abortCopyFromURLOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ copyId,
+ timeoutInSeconds,
+ comp12
+ ],
+ headerParameters: [
+ version,
+ requestId,
+ copyActionAbortConstant,
+ leaseId0
+ ],
+ responses: {
+ 204: {
+ headersMapper: BlobAbortCopyFromURLHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobAbortCopyFromURLHeaders
}
- else if (accountKey.length === 0) {
- throw new Error("Invalid AccountKey in the provided Connection String");
+ },
+ isXML: true,
+ serializer: serializer$2
+};
+var setTierOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ snapshot,
+ versionId,
+ timeoutInSeconds,
+ comp13
+ ],
+ headerParameters: [
+ tier1,
+ rehydratePriority,
+ version,
+ requestId,
+ leaseId0,
+ ifTags
+ ],
+ responses: {
+ 200: {
+ headersMapper: BlobSetTierHeaders
+ },
+ 202: {
+ headersMapper: BlobSetTierHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobSetTierHeaders
}
- return {
- kind: "AccountConnString",
- url: blobEndpoint,
- accountName: accountName,
- accountKey: accountKey,
- proxyUri: proxyUri
- };
- }
- else {
- // SAS connection string
- var accountSas = getValueInConnString(connectionString, "SharedAccessSignature");
- var accountName = getAccountNameFromUrl(blobEndpoint);
- if (!blobEndpoint) {
- throw new Error("Invalid BlobEndpoint in the provided SAS Connection String");
+ },
+ isXML: true,
+ serializer: serializer$2
+};
+var getAccountInfoOperationSpec$2 = {
+ httpMethod: "GET",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ restype1,
+ comp0
+ ],
+ headerParameters: [
+ version
+ ],
+ responses: {
+ 200: {
+ headersMapper: BlobGetAccountInfoHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobGetAccountInfoHeaders
}
- else if (!accountSas) {
- throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String");
+ },
+ isXML: true,
+ serializer: serializer$2
+};
+var queryOperationSpec = {
+ httpMethod: "POST",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ snapshot,
+ timeoutInSeconds,
+ comp14
+ ],
+ headerParameters: [
+ version,
+ requestId,
+ leaseId0,
+ encryptionKey,
+ encryptionKeySha256,
+ encryptionAlgorithm,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ requestBody: {
+ parameterPath: [
+ "options",
+ "queryRequest"
+ ],
+ mapper: QueryRequest
+ },
+ contentType: "application/xml; charset=utf-8",
+ responses: {
+ 200: {
+ bodyMapper: {
+ serializedName: "parsedResponse",
+ type: {
+ name: "Stream"
+ }
+ },
+ headersMapper: BlobQueryHeaders
+ },
+ 206: {
+ bodyMapper: {
+ serializedName: "parsedResponse",
+ type: {
+ name: "Stream"
+ }
+ },
+ headersMapper: BlobQueryHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobQueryHeaders
}
- return { kind: "SASConnString", url: blobEndpoint, accountName: accountName, accountSas: accountSas };
- }
-}
-/**
- * Internal escape method implemented Strategy Two mentioned in escapeURL() description.
- *
- * @param {string} text
- * @returns {string}
- */
-function escape(text) {
- return encodeURIComponent(text)
- .replace(/%2F/g, "/") // Don't escape for "/"
- .replace(/'/g, "%27") // Escape for "'"
- .replace(/\+/g, "%20")
- .replace(/%25/g, "%"); // Revert encoded "%"
-}
-/**
- * Append a string to URL path. Will remove duplicated "/" in front of the string
- * when URL path ends with a "/".
- *
- * @export
- * @param {string} url Source URL string
- * @param {string} name String to be appended to URL
- * @returns {string} An updated URL string
- */
-function appendToURLPath(url, name) {
- var urlParsed = coreHttp.URLBuilder.parse(url);
- var path = urlParsed.getPath();
- path = path ? (path.endsWith("/") ? "" + path + name : path + "/" + name) : name;
- urlParsed.setPath(path);
- return urlParsed.toString();
-}
-/**
- * Set URL parameter name and value. If name exists in URL parameters, old value
- * will be replaced by name key. If not provide value, the parameter will be deleted.
+ },
+ isXML: true,
+ serializer: serializer$2
+};
+var getTagsOperationSpec = {
+ httpMethod: "GET",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ snapshot,
+ versionId,
+ comp15
+ ],
+ headerParameters: [
+ version,
+ requestId,
+ ifTags
+ ],
+ responses: {
+ 200: {
+ bodyMapper: BlobTags,
+ headersMapper: BlobGetTagsHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobGetTagsHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$2
+};
+var setTagsOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ versionId,
+ comp15
+ ],
+ headerParameters: [
+ version,
+ transactionalContentMD5,
+ transactionalContentCrc64,
+ requestId,
+ ifTags
+ ],
+ requestBody: {
+ parameterPath: [
+ "options",
+ "tags"
+ ],
+ mapper: BlobTags
+ },
+ contentType: "application/xml; charset=utf-8",
+ responses: {
+ 204: {
+ headersMapper: BlobSetTagsHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlobSetTagsHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$2
+};
+
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for license information.
*
- * @export
- * @param {string} url Source URL string
- * @param {string} name Parameter name
- * @param {string} [value] Parameter value
- * @returns {string} An updated URL string
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
-function setURLParameter(url, name, value) {
- var urlParsed = coreHttp.URLBuilder.parse(url);
- urlParsed.setQueryParameter(name, value);
- return urlParsed.toString();
-}
-/**
- * Set URL host.
+
+var Mappers$3 = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ ClearRange: ClearRange,
+ PageBlobClearPagesHeaders: PageBlobClearPagesHeaders,
+ PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders,
+ PageBlobCreateHeaders: PageBlobCreateHeaders,
+ PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders,
+ PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders,
+ PageBlobResizeHeaders: PageBlobResizeHeaders,
+ PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders,
+ PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders,
+ PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders,
+ PageList: PageList,
+ PageRange: PageRange,
+ StorageError: StorageError
+});
+
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for
+ * license information.
*
- * @export
- * @param {string} url Source URL string
- * @param {string} host New host string
- * @returns An updated URL string
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is
+ * regenerated.
*/
-function setURLHost(url, host) {
- var urlParsed = coreHttp.URLBuilder.parse(url);
- urlParsed.setHost(host);
- return urlParsed.toString();
-}
-/**
- * Get URL path from an URL string.
- *
- * @export
- * @param {string} url Source URL string
- * @returns {(string | undefined)}
- */
-function getURLPath(url) {
- var urlParsed = coreHttp.URLBuilder.parse(url);
- return urlParsed.getPath();
-}
-/**
- * Get URL scheme from an URL string.
- *
- * @export
- * @param {string} url Source URL string
- * @returns {(string | undefined)}
- */
-function getURLScheme(url) {
- var urlParsed = coreHttp.URLBuilder.parse(url);
- return urlParsed.getScheme();
-}
-/**
- * Get URL path and query from an URL string.
- *
- * @export
- * @param {string} url Source URL string
- * @returns {(string | undefined)}
- */
-function getURLPathAndQuery(url) {
- var urlParsed = coreHttp.URLBuilder.parse(url);
- var pathString = urlParsed.getPath();
- if (!pathString) {
- throw new RangeError("Invalid url without valid path.");
- }
- var queryString = urlParsed.getQuery() || "";
- queryString = queryString.trim();
- if (queryString != "") {
- queryString = queryString.startsWith("?") ? queryString : "?" + queryString; // Ensure query string start with '?'
- }
- return "" + pathString + queryString;
-}
-/**
- * Get URL query key value pairs from an URL string.
- *
- * @export
- * @param {string} url
- * @returns {{[key: string]: string}}
- */
-function getURLQueries(url) {
- var queryString = coreHttp.URLBuilder.parse(url).getQuery();
- if (!queryString) {
- return {};
- }
- queryString = queryString.trim();
- queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString;
- var querySubStrings = queryString.split("&");
- querySubStrings = querySubStrings.filter(function (value) {
- var indexOfEqual = value.indexOf("=");
- var lastIndexOfEqual = value.lastIndexOf("=");
- return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1);
- });
- var queries = {};
- for (var _i = 0, querySubStrings_1 = querySubStrings; _i < querySubStrings_1.length; _i++) {
- var querySubString = querySubStrings_1[_i];
- var splitResults = querySubString.split("=");
- var key = splitResults[0];
- var value = splitResults[1];
- queries[key] = value;
- }
- return queries;
-}
-/**
- * Rounds a date off to seconds.
- *
- * @export
- * @param {Date} date
- * @param {boolean} [withMilliseconds=true] If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned;
- * If false, YYYY-MM-DDThh:mm:ssZ will be returned.
- * @returns {string} Date string in ISO8061 format, with or without 7 milliseconds component
- */
-function truncatedISO8061Date(date, withMilliseconds) {
- if (withMilliseconds === void 0) { withMilliseconds = true; }
- // Date.toISOString() will return like "2018-10-29T06:34:36.139Z"
- var dateString = date.toISOString();
- return withMilliseconds
- ? dateString.substring(0, dateString.length - 1) + "0000" + "Z"
- : dateString.substring(0, dateString.length - 5) + "Z";
-}
-/**
- * Base64 encode.
- *
- * @export
- * @param {string} content
- * @returns {string}
- */
-function base64encode(content) {
- return !coreHttp.isNode ? btoa(content) : Buffer.from(content).toString("base64");
-}
-/**
- * Generate a 64 bytes base64 block ID string.
- *
- * @export
- * @param {number} blockIndex
- * @returns {string}
- */
-function generateBlockID(blockIDPrefix, blockIndex) {
- // To generate a 64 bytes base64 string, source string should be 48
- var maxSourceStringLength = 48;
- // A blob can have a maximum of 100,000 uncommitted blocks at any given time
- var maxBlockIndexLength = 6;
- var maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength;
- if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) {
- blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength);
- }
- var res = blockIDPrefix +
- padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0");
- return base64encode(res);
-}
-/**
- * Delay specified time interval.
- *
- * @export
- * @param {number} timeInMs
- * @param {AbortSignalLike} [aborter]
- * @param {Error} [abortError]
- */
-function delay(timeInMs, aborter, abortError) {
- return tslib.__awaiter(this, void 0, void 0, function () {
- return tslib.__generator(this, function (_a) {
- return [2 /*return*/, new Promise(function (resolve, reject) {
- var timeout;
- var abortHandler = function () {
- if (timeout !== undefined) {
- clearTimeout(timeout);
- }
- reject(abortError);
- };
- var resolveHandler = function () {
- if (aborter !== undefined) {
- aborter.removeEventListener("abort", abortHandler);
- }
- resolve();
- };
- timeout = setTimeout(resolveHandler, timeInMs);
- if (aborter !== undefined) {
- aborter.addEventListener("abort", abortHandler);
- }
- })];
- });
- });
-}
-/**
- * String.prototype.padStart()
- *
- * @export
- * @param {string} currentString
- * @param {number} targetLength
- * @param {string} [padString=" "]
- * @returns {string}
- */
-function padStart(currentString, targetLength, padString) {
- if (padString === void 0) { padString = " "; }
- // TS doesn't know this code needs to run downlevel sometimes.
- // @ts-expect-error
- if (String.prototype.padStart) {
- return currentString.padStart(targetLength, padString);
- }
- padString = padString || " ";
- if (currentString.length > targetLength) {
- return currentString;
- }
- else {
- targetLength = targetLength - currentString.length;
- if (targetLength > padString.length) {
- padString += padString.repeat(targetLength / padString.length);
- }
- return padString.slice(0, targetLength) + currentString;
- }
-}
-/**
- * If two strings are equal when compared case insensitive.
- *
- * @export
- * @param {string} str1
- * @param {string} str2
- * @returns {boolean}
- */
-function iEqual(str1, str2) {
- return str1.toLocaleLowerCase() === str2.toLocaleLowerCase();
-}
-/**
- * Extracts account name from the url
- * @param {string} url url to extract the account name from
- * @returns {string} with the account name
- */
-function getAccountNameFromUrl(url) {
- var parsedUrl = coreHttp.URLBuilder.parse(url);
- var accountName;
- try {
- if (parsedUrl.getHost().split(".")[1] === "blob") {
- // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;
- accountName = parsedUrl.getHost().split(".")[0];
- }
- else if (isIpEndpointStyle(parsedUrl)) {
- // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/
- // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/
- // .getPath() -> /devstoreaccount1/
- accountName = parsedUrl.getPath().split("/")[1];
- }
- else {
- // Custom domain case: "https://customdomain.com/containername/blob".
- accountName = "";
- }
- return accountName;
- }
- catch (error) {
- throw new Error("Unable to extract accountName with provided information.");
- }
-}
-function isIpEndpointStyle(parsedUrl) {
- if (parsedUrl.getHost() == undefined) {
- return false;
- }
- var host = parsedUrl.getHost() + (parsedUrl.getPort() == undefined ? "" : ":" + parsedUrl.getPort());
- // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'.
- // Case 2: localhost(:port), use broad regex to match port part.
- // Case 3: Ipv4, use broad regex which just check if host contains Ipv4.
- // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html.
- return /^.*:.*:.*$|^localhost(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host);
-}
-/**
- * Convert Tags to encoded string.
- *
- * @export
- * @param {Tags} tags
- * @returns {string | undefined}
- */
-function toBlobTagsString(tags) {
- if (tags === undefined) {
- return undefined;
- }
- var tagPairs = [];
- for (var key in tags) {
- if (tags.hasOwnProperty(key)) {
- var value = tags[key];
- tagPairs.push(encodeURIComponent(key) + "=" + encodeURIComponent(value));
- }
- }
- return tagPairs.join("&");
-}
-/**
- * Convert Tags type to BlobTags.
- *
- * @export
- * @param {Tags} [tags]
- * @returns {(BlobTags | undefined)}
- */
-function toBlobTags(tags) {
- if (tags === undefined) {
- return undefined;
- }
- var res = {
- blobTagSet: []
- };
- for (var key in tags) {
- if (tags.hasOwnProperty(key)) {
- var value = tags[key];
- res.blobTagSet.push({
- key: key,
- value: value
- });
- }
- }
- return res;
-}
-/**
- * Covert BlobTags to Tags type.
- *
- * @export
- * @param {BlobTags} [tags]
- * @returns {(Tags | undefined)}
- */
-function toTags(tags) {
- if (tags === undefined) {
- return undefined;
- }
- var res = {};
- for (var _i = 0, _a = tags.blobTagSet; _i < _a.length; _i++) {
- var blobTag = _a[_i];
- res[blobTag.key] = blobTag.value;
- }
- return res;
-}
-/**
- * Convert BlobQueryTextConfiguration to QuerySerialization type.
- *
- * @export
- * @param {(BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration)} [textConfiguration]
- * @returns {(QuerySerialization | undefined)}
- */
-function toQuerySerialization(textConfiguration) {
- if (textConfiguration === undefined) {
- return undefined;
- }
- switch (textConfiguration.kind) {
- case "csv":
- return {
- format: {
- type: "delimited",
- delimitedTextConfiguration: {
- columnSeparator: textConfiguration.columnSeparator || ",",
- fieldQuote: textConfiguration.fieldQuote || "",
- recordSeparator: textConfiguration.recordSeparator,
- escapeChar: textConfiguration.escapeCharacter || "",
- headersPresent: textConfiguration.hasHeaders || false
- }
- }
- };
- case "json":
- return {
- format: {
- type: "json",
- jsonTextConfiguration: {
- recordSeparator: textConfiguration.recordSeparator
- }
- }
- };
- default:
- throw Error("Invalid BlobQueryTextConfiguration.");
- }
-}
-function parseObjectReplicationRecord(objectReplicationRecord) {
- if (!objectReplicationRecord) {
- return undefined;
- }
- if ("policy-id" in objectReplicationRecord) {
- // If the dictionary contains a key with policy id, we are not required to do any parsing since
- // the policy id should already be stored in the ObjectReplicationDestinationPolicyId.
- return undefined;
- }
- var orProperties = [];
- var _loop_1 = function (key) {
- var ids = key.split("_");
- var policyPrefix = "or-";
- if (ids[0].startsWith(policyPrefix)) {
- ids[0] = ids[0].substring(policyPrefix.length);
- }
- var rule = {
- ruleId: ids[1],
- replicationStatus: objectReplicationRecord[key]
- };
- var policyIndex = orProperties.findIndex(function (policy) { return policy.policyId === ids[0]; });
- if (policyIndex > -1) {
- orProperties[policyIndex].rules.push(rule);
- }
- else {
- orProperties.push({
- policyId: ids[0],
- rules: [rule]
- });
- }
- };
- for (var key in objectReplicationRecord) {
- _loop_1(key);
- }
- return orProperties;
-}
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including:
- *
- * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'.
- * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL
- * thus avoid the browser cache.
- *
- * 2. Remove cookie header for security
- *
- * 3. Remove content-length header to avoid browsers warning
- *
- * @class StorageBrowserPolicy
- * @extends {BaseRequestPolicy}
- */
-var StorageBrowserPolicy = /** @class */ (function (_super) {
- tslib.__extends(StorageBrowserPolicy, _super);
+/** Class representing a PageBlob. */
+var PageBlob = /** @class */ (function () {
/**
- * Creates an instance of StorageBrowserPolicy.
- * @param {RequestPolicy} nextPolicy
- * @param {RequestPolicyOptions} options
- * @memberof StorageBrowserPolicy
+ * Create a PageBlob.
+ * @param {StorageClientContext} client Reference to the service client.
*/
- function StorageBrowserPolicy(nextPolicy, options) {
- return _super.call(this, nextPolicy, options) || this;
+ function PageBlob(client) {
+ this.client = client;
}
- /**
- * Sends out request.
- *
- * @param {WebResource} request
- * @returns {Promise}
- * @memberof StorageBrowserPolicy
- */
- StorageBrowserPolicy.prototype.sendRequest = function (request) {
- return tslib.__awaiter(this, void 0, void 0, function () {
- return tslib.__generator(this, function (_a) {
- {
- return [2 /*return*/, this._nextPolicy.sendRequest(request)];
- }
- });
- });
+ PageBlob.prototype.create = function (contentLength, blobContentLength, options, callback) {
+ return this.client.sendOperationRequest({
+ contentLength: contentLength,
+ blobContentLength: blobContentLength,
+ options: options
+ }, createOperationSpec$1, callback);
};
- return StorageBrowserPolicy;
-}(coreHttp.BaseRequestPolicy));
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects.
- *
- * @export
- * @class StorageBrowserPolicyFactory
- * @implements {RequestPolicyFactory}
- */
-var StorageBrowserPolicyFactory = /** @class */ (function () {
- function StorageBrowserPolicyFactory() {
- }
- /**
- * Creates a StorageBrowserPolicyFactory object.
- *
- * @param {RequestPolicy} nextPolicy
- * @param {RequestPolicyOptions} options
- * @returns {StorageBrowserPolicy}
- * @memberof StorageBrowserPolicyFactory
- */
- StorageBrowserPolicyFactory.prototype.create = function (nextPolicy, options) {
- return new StorageBrowserPolicy(nextPolicy, options);
+ PageBlob.prototype.uploadPages = function (body, contentLength, options, callback) {
+ return this.client.sendOperationRequest({
+ body: body,
+ contentLength: contentLength,
+ options: options
+ }, uploadPagesOperationSpec, callback);
};
- return StorageBrowserPolicyFactory;
+ PageBlob.prototype.clearPages = function (contentLength, options, callback) {
+ return this.client.sendOperationRequest({
+ contentLength: contentLength,
+ options: options
+ }, clearPagesOperationSpec, callback);
+ };
+ PageBlob.prototype.uploadPagesFromURL = function (sourceUrl, sourceRange, contentLength, range, options, callback) {
+ return this.client.sendOperationRequest({
+ sourceUrl: sourceUrl,
+ sourceRange: sourceRange,
+ contentLength: contentLength,
+ range: range,
+ options: options
+ }, uploadPagesFromURLOperationSpec, callback);
+ };
+ PageBlob.prototype.getPageRanges = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, getPageRangesOperationSpec, callback);
+ };
+ PageBlob.prototype.getPageRangesDiff = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, getPageRangesDiffOperationSpec, callback);
+ };
+ PageBlob.prototype.resize = function (blobContentLength, options, callback) {
+ return this.client.sendOperationRequest({
+ blobContentLength: blobContentLength,
+ options: options
+ }, resizeOperationSpec, callback);
+ };
+ PageBlob.prototype.updateSequenceNumber = function (sequenceNumberAction, options, callback) {
+ return this.client.sendOperationRequest({
+ sequenceNumberAction: sequenceNumberAction,
+ options: options
+ }, updateSequenceNumberOperationSpec, callback);
+ };
+ PageBlob.prototype.copyIncremental = function (copySource, options, callback) {
+ return this.client.sendOperationRequest({
+ copySource: copySource,
+ options: options
+ }, copyIncrementalOperationSpec, callback);
+ };
+ return PageBlob;
}());
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-(function (StorageRetryPolicyType) {
- /**
- * Exponential retry. Retry time delay grows exponentially.
- */
- StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL";
- /**
- * Linear retry. Retry time delay grows linearly.
- */
- StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED";
-})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {}));
-// Default values of StorageRetryOptions
-var DEFAULT_RETRY_OPTIONS = {
- maxRetryDelayInMs: 120 * 1000,
- maxTries: 4,
- retryDelayInMs: 4 * 1000,
- retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL,
- secondaryHost: "",
- tryTimeoutInMs: undefined // Use server side default timeout strategy
+// Operation Specifications
+var serializer$3 = new coreHttp.Serializer(Mappers$3, true);
+var createOperationSpec$1 = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds
+ ],
+ headerParameters: [
+ contentLength,
+ tier0,
+ metadata,
+ encryptionScope,
+ blobContentLength,
+ blobSequenceNumber,
+ version,
+ requestId,
+ blobTagsString,
+ blobType0,
+ blobContentType,
+ blobContentEncoding,
+ blobContentLanguage,
+ blobContentMD5,
+ blobCacheControl,
+ blobContentDisposition,
+ leaseId0,
+ encryptionKey,
+ encryptionKeySha256,
+ encryptionAlgorithm,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ responses: {
+ 201: {
+ headersMapper: PageBlobCreateHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: PageBlobCreateHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$3
};
-var RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted.");
-/**
- * Retry policy with exponential retry and linear retry implemented.
+var uploadPagesOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp16
+ ],
+ headerParameters: [
+ contentLength,
+ transactionalContentMD5,
+ transactionalContentCrc64,
+ range0,
+ encryptionScope,
+ version,
+ requestId,
+ pageWrite0,
+ leaseId0,
+ encryptionKey,
+ encryptionKeySha256,
+ encryptionAlgorithm,
+ ifSequenceNumberLessThanOrEqualTo,
+ ifSequenceNumberLessThan,
+ ifSequenceNumberEqualTo,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ requestBody: {
+ parameterPath: "body",
+ mapper: {
+ required: true,
+ serializedName: "body",
+ type: {
+ name: "Stream"
+ }
+ }
+ },
+ contentType: "application/octet-stream",
+ responses: {
+ 201: {
+ headersMapper: PageBlobUploadPagesHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: PageBlobUploadPagesHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$3
+};
+var clearPagesOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp16
+ ],
+ headerParameters: [
+ contentLength,
+ range0,
+ encryptionScope,
+ version,
+ requestId,
+ pageWrite1,
+ leaseId0,
+ encryptionKey,
+ encryptionKeySha256,
+ encryptionAlgorithm,
+ ifSequenceNumberLessThanOrEqualTo,
+ ifSequenceNumberLessThan,
+ ifSequenceNumberEqualTo,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ responses: {
+ 201: {
+ headersMapper: PageBlobClearPagesHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: PageBlobClearPagesHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$3
+};
+var uploadPagesFromURLOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp16
+ ],
+ headerParameters: [
+ sourceUrl,
+ sourceRange0,
+ sourceContentMD5,
+ sourceContentCrc64,
+ contentLength,
+ range1,
+ encryptionScope,
+ version,
+ requestId,
+ pageWrite0,
+ encryptionKey,
+ encryptionKeySha256,
+ encryptionAlgorithm,
+ leaseId0,
+ ifSequenceNumberLessThanOrEqualTo,
+ ifSequenceNumberLessThan,
+ ifSequenceNumberEqualTo,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags,
+ sourceIfModifiedSince,
+ sourceIfUnmodifiedSince,
+ sourceIfMatch,
+ sourceIfNoneMatch
+ ],
+ responses: {
+ 201: {
+ headersMapper: PageBlobUploadPagesFromURLHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: PageBlobUploadPagesFromURLHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$3
+};
+var getPageRangesOperationSpec = {
+ httpMethod: "GET",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ snapshot,
+ timeoutInSeconds,
+ comp17
+ ],
+ headerParameters: [
+ range0,
+ version,
+ requestId,
+ leaseId0,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ responses: {
+ 200: {
+ bodyMapper: PageList,
+ headersMapper: PageBlobGetPageRangesHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: PageBlobGetPageRangesHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$3
+};
+var getPageRangesDiffOperationSpec = {
+ httpMethod: "GET",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ snapshot,
+ timeoutInSeconds,
+ prevsnapshot,
+ comp17
+ ],
+ headerParameters: [
+ prevSnapshotUrl,
+ range0,
+ version,
+ requestId,
+ leaseId0,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ responses: {
+ 200: {
+ bodyMapper: PageList,
+ headersMapper: PageBlobGetPageRangesDiffHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: PageBlobGetPageRangesDiffHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$3
+};
+var resizeOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp0
+ ],
+ headerParameters: [
+ encryptionScope,
+ blobContentLength,
+ version,
+ requestId,
+ leaseId0,
+ encryptionKey,
+ encryptionKeySha256,
+ encryptionAlgorithm,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ responses: {
+ 200: {
+ headersMapper: PageBlobResizeHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: PageBlobResizeHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$3
+};
+var updateSequenceNumberOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp0
+ ],
+ headerParameters: [
+ sequenceNumberAction,
+ blobSequenceNumber,
+ version,
+ requestId,
+ leaseId0,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ responses: {
+ 200: {
+ headersMapper: PageBlobUpdateSequenceNumberHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: PageBlobUpdateSequenceNumberHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$3
+};
+var copyIncrementalOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp18
+ ],
+ headerParameters: [
+ copySource,
+ version,
+ requestId,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ responses: {
+ 202: {
+ headersMapper: PageBlobCopyIncrementalHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: PageBlobCopyIncrementalHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$3
+};
+
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for license information.
*
- * @class RetryPolicy
- * @extends {BaseRequestPolicy}
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
-var StorageRetryPolicy = /** @class */ (function (_super) {
- tslib.__extends(StorageRetryPolicy, _super);
+
+var Mappers$4 = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders,
+ AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders,
+ AppendBlobCreateHeaders: AppendBlobCreateHeaders,
+ AppendBlobSealHeaders: AppendBlobSealHeaders,
+ StorageError: StorageError
+});
+
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for
+ * license information.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is
+ * regenerated.
+ */
+/** Class representing a AppendBlob. */
+var AppendBlob = /** @class */ (function () {
/**
- * Creates an instance of RetryPolicy.
- *
- * @param {RequestPolicy} nextPolicy
- * @param {RequestPolicyOptions} options
- * @param {StorageRetryOptions} [retryOptions=DEFAULT_RETRY_OPTIONS]
- * @memberof StorageRetryPolicy
+ * Create a AppendBlob.
+ * @param {StorageClientContext} client Reference to the service client.
*/
- function StorageRetryPolicy(nextPolicy, options, retryOptions) {
- if (retryOptions === void 0) { retryOptions = DEFAULT_RETRY_OPTIONS; }
- var _this = _super.call(this, nextPolicy, options) || this;
- // Initialize retry options
- _this.retryOptions = {
- retryPolicyType: retryOptions.retryPolicyType
- ? retryOptions.retryPolicyType
- : DEFAULT_RETRY_OPTIONS.retryPolicyType,
- maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1
- ? Math.floor(retryOptions.maxTries)
- : DEFAULT_RETRY_OPTIONS.maxTries,
- tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0
- ? retryOptions.tryTimeoutInMs
- : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs,
- retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0
- ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs
- ? retryOptions.maxRetryDelayInMs
- : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs)
- : DEFAULT_RETRY_OPTIONS.retryDelayInMs,
- maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0
- ? retryOptions.maxRetryDelayInMs
- : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs,
- secondaryHost: retryOptions.secondaryHost
- ? retryOptions.secondaryHost
- : DEFAULT_RETRY_OPTIONS.secondaryHost
- };
- return _this;
+ function AppendBlob(client) {
+ this.client = client;
}
- /**
- * Sends request.
- *
- * @param {WebResource} request
- * @returns {Promise}
- * @memberof StorageRetryPolicy
- */
- StorageRetryPolicy.prototype.sendRequest = function (request) {
- return tslib.__awaiter(this, void 0, void 0, function () {
- return tslib.__generator(this, function (_a) {
- return [2 /*return*/, this.attemptSendRequest(request, false, 1)];
- });
- });
+ AppendBlob.prototype.create = function (contentLength, options, callback) {
+ return this.client.sendOperationRequest({
+ contentLength: contentLength,
+ options: options
+ }, createOperationSpec$2, callback);
};
- /**
- * Decide and perform next retry. Won't mutate request parameter.
- *
- * @protected
- * @param {WebResource} request
- * @param {boolean} secondaryHas404 If attempt was against the secondary & it returned a StatusNotFound (404), then
- * the resource was not found. This may be due to replication delay. So, in this
- * case, we'll never try the secondary again for this operation.
- * @param {number} attempt How many retries has been attempted to performed, starting from 1, which includes
- * the attempt will be performed by this method call.
- * @returns {Promise}
- * @memberof StorageRetryPolicy
- */
- StorageRetryPolicy.prototype.attemptSendRequest = function (request, secondaryHas404, attempt) {
- return tslib.__awaiter(this, void 0, void 0, function () {
- var newRequest, isPrimaryRetry, response, err_1;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- newRequest = request.clone();
- isPrimaryRetry = secondaryHas404 ||
- !this.retryOptions.secondaryHost ||
- !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") ||
- attempt % 2 === 1;
- if (!isPrimaryRetry) {
- newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost);
- }
- // Set the server-side timeout query parameter "timeout=[seconds]"
- if (this.retryOptions.tryTimeoutInMs) {
- newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString());
- }
- _a.label = 1;
- case 1:
- _a.trys.push([1, 3, , 4]);
- logger.info("RetryPolicy: =====> Try=" + attempt + " " + (isPrimaryRetry ? "Primary" : "Secondary"));
- return [4 /*yield*/, this._nextPolicy.sendRequest(newRequest)];
- case 2:
- response = _a.sent();
- if (!this.shouldRetry(isPrimaryRetry, attempt, response)) {
- return [2 /*return*/, response];
- }
- secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404);
- return [3 /*break*/, 4];
- case 3:
- err_1 = _a.sent();
- logger.error("RetryPolicy: Caught error, message: " + err_1.message + ", code: " + err_1.code);
- if (!this.shouldRetry(isPrimaryRetry, attempt, response, err_1)) {
- throw err_1;
- }
- return [3 /*break*/, 4];
- case 4: return [4 /*yield*/, this.delay(isPrimaryRetry, attempt, request.abortSignal)];
- case 5:
- _a.sent();
- return [4 /*yield*/, this.attemptSendRequest(request, secondaryHas404, ++attempt)];
- case 6: return [2 /*return*/, _a.sent()];
- }
- });
- });
+ AppendBlob.prototype.appendBlock = function (body, contentLength, options, callback) {
+ return this.client.sendOperationRequest({
+ body: body,
+ contentLength: contentLength,
+ options: options
+ }, appendBlockOperationSpec, callback);
};
- /**
- * Decide whether to retry according to last HTTP response and retry counters.
- *
- * @protected
- * @param {boolean} isPrimaryRetry
- * @param {number} attempt
- * @param {HttpOperationResponse} [response]
- * @param {RestError} [err]
- * @returns {boolean}
- * @memberof StorageRetryPolicy
- */
- StorageRetryPolicy.prototype.shouldRetry = function (isPrimaryRetry, attempt, response, err) {
- if (attempt >= this.retryOptions.maxTries) {
- logger.info("RetryPolicy: Attempt(s) " + attempt + " >= maxTries " + this.retryOptions
- .maxTries + ", no further try.");
- return false;
+ AppendBlob.prototype.appendBlockFromUrl = function (sourceUrl, contentLength, options, callback) {
+ return this.client.sendOperationRequest({
+ sourceUrl: sourceUrl,
+ contentLength: contentLength,
+ options: options
+ }, appendBlockFromUrlOperationSpec, callback);
+ };
+ AppendBlob.prototype.seal = function (options, callback) {
+ return this.client.sendOperationRequest({
+ options: options
+ }, sealOperationSpec, callback);
+ };
+ return AppendBlob;
+}());
+// Operation Specifications
+var serializer$4 = new coreHttp.Serializer(Mappers$4, true);
+var createOperationSpec$2 = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds
+ ],
+ headerParameters: [
+ contentLength,
+ metadata,
+ encryptionScope,
+ version,
+ requestId,
+ blobTagsString,
+ blobType1,
+ blobContentType,
+ blobContentEncoding,
+ blobContentLanguage,
+ blobContentMD5,
+ blobCacheControl,
+ blobContentDisposition,
+ leaseId0,
+ encryptionKey,
+ encryptionKeySha256,
+ encryptionAlgorithm,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ responses: {
+ 201: {
+ headersMapper: AppendBlobCreateHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: AppendBlobCreateHeaders
}
- // Handle network failures, you may need to customize the list when you implement
- // your own http client
- var retriableErrors = [
- "ETIMEDOUT",
- "ESOCKETTIMEDOUT",
- "ECONNREFUSED",
- "ECONNRESET",
- "ENOENT",
- "ENOTFOUND",
- "TIMEOUT",
- "EPIPE",
- "REQUEST_SEND_ERROR" // For default xhr based http client provided in ms-rest-js
- ];
- if (err) {
- for (var _i = 0, retriableErrors_1 = retriableErrors; _i < retriableErrors_1.length; _i++) {
- var retriableError = retriableErrors_1[_i];
- if (err.name.toUpperCase().includes(retriableError) ||
- err.message.toUpperCase().includes(retriableError) ||
- (err.code &&
- err.code
- .toString()
- .toUpperCase()
- .includes(retriableError))) {
- logger.info("RetryPolicy: Network error " + retriableError + " found, will retry.");
- return true;
- }
+ },
+ isXML: true,
+ serializer: serializer$4
+};
+var appendBlockOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp19
+ ],
+ headerParameters: [
+ contentLength,
+ transactionalContentMD5,
+ transactionalContentCrc64,
+ encryptionScope,
+ version,
+ requestId,
+ leaseId0,
+ maxSize,
+ appendPosition,
+ encryptionKey,
+ encryptionKeySha256,
+ encryptionAlgorithm,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ requestBody: {
+ parameterPath: "body",
+ mapper: {
+ required: true,
+ serializedName: "body",
+ type: {
+ name: "Stream"
}
}
- // If attempt was against the secondary & it returned a StatusNotFound (404), then
- // the resource was not found. This may be due to replication delay. So, in this
- // case, we'll never try the secondary again for this operation.
- if (response || err) {
- var statusCode = response ? response.status : err ? err.statusCode : 0;
- if (!isPrimaryRetry && statusCode === 404) {
- logger.info("RetryPolicy: Secondary access with 404, will retry.");
- return true;
- }
- // Server internal error or server timeout
- if (statusCode === 503 || statusCode === 500) {
- logger.info("RetryPolicy: Will retry for status code " + statusCode + ".");
- return true;
- }
+ },
+ contentType: "application/octet-stream",
+ responses: {
+ 201: {
+ headersMapper: AppendBlobAppendBlockHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: AppendBlobAppendBlockHeaders
}
- return false;
- };
- /**
- * Delay a calculated time between retries.
- *
- * @private
- * @param {boolean} isPrimaryRetry
- * @param {number} attempt
- * @param {AbortSignalLike} [abortSignal]
- * @memberof StorageRetryPolicy
- */
- StorageRetryPolicy.prototype.delay = function (isPrimaryRetry, attempt, abortSignal) {
- return tslib.__awaiter(this, void 0, void 0, function () {
- var delayTimeInMs;
- return tslib.__generator(this, function (_a) {
- delayTimeInMs = 0;
- if (isPrimaryRetry) {
- switch (this.retryOptions.retryPolicyType) {
- case exports.StorageRetryPolicyType.EXPONENTIAL:
- delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs);
- break;
- case exports.StorageRetryPolicyType.FIXED:
- delayTimeInMs = this.retryOptions.retryDelayInMs;
- break;
- }
- }
- else {
- delayTimeInMs = Math.random() * 1000;
- }
- logger.info("RetryPolicy: Delay for " + delayTimeInMs + "ms");
- return [2 /*return*/, delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR)];
- });
- });
- };
- return StorageRetryPolicy;
-}(coreHttp.BaseRequestPolicy));
+ },
+ isXML: true,
+ serializer: serializer$4
+};
+var appendBlockFromUrlOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp19
+ ],
+ headerParameters: [
+ sourceUrl,
+ sourceRange1,
+ sourceContentMD5,
+ sourceContentCrc64,
+ contentLength,
+ transactionalContentMD5,
+ encryptionScope,
+ version,
+ requestId,
+ encryptionKey,
+ encryptionKeySha256,
+ encryptionAlgorithm,
+ leaseId0,
+ maxSize,
+ appendPosition,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags,
+ sourceIfModifiedSince,
+ sourceIfUnmodifiedSince,
+ sourceIfMatch,
+ sourceIfNoneMatch
+ ],
+ responses: {
+ 201: {
+ headersMapper: AppendBlobAppendBlockFromUrlHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: AppendBlobAppendBlockFromUrlHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$4
+};
+var sealOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp20
+ ],
+ headerParameters: [
+ version,
+ requestId,
+ leaseId0,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ appendPosition
+ ],
+ responses: {
+ 200: {
+ headersMapper: AppendBlobSealHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: AppendBlobSealHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$4
+};
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects.
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for license information.
*
- * @export
- * @class StorageRetryPolicyFactory
- * @implements {RequestPolicyFactory}
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
-var StorageRetryPolicyFactory = /** @class */ (function () {
- /**
- * Creates an instance of StorageRetryPolicyFactory.
- * @param {StorageRetryOptions} [retryOptions]
- * @memberof StorageRetryPolicyFactory
- */
- function StorageRetryPolicyFactory(retryOptions) {
- this.retryOptions = retryOptions;
- }
- /**
- * Creates a StorageRetryPolicy object.
- *
- * @param {RequestPolicy} nextPolicy
- * @param {RequestPolicyOptions} options
- * @returns {StorageRetryPolicy}
- * @memberof StorageRetryPolicyFactory
- */
- StorageRetryPolicyFactory.prototype.create = function (nextPolicy, options) {
- return new StorageRetryPolicy(nextPolicy, options, this.retryOptions);
- };
- return StorageRetryPolicyFactory;
-}());
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * Credential policy used to sign HTTP(S) requests before sending. This is an
- * abstract class.
- *
- * @export
- * @abstract
- * @class CredentialPolicy
- * @extends {BaseRequestPolicy}
- */
-var CredentialPolicy = /** @class */ (function (_super) {
- tslib.__extends(CredentialPolicy, _super);
- function CredentialPolicy() {
- return _super !== null && _super.apply(this, arguments) || this;
- }
- /**
- * Sends out request.
- *
- * @param {WebResource} request
- * @returns {Promise}
- * @memberof CredentialPolicy
- */
- CredentialPolicy.prototype.sendRequest = function (request) {
- return this._nextPolicy.sendRequest(this.signRequest(request));
- };
- /**
- * Child classes must implement this method with request signing. This method
- * will be executed in {@link sendRequest}.
- *
- * @protected
- * @abstract
- * @param {WebResource} request
- * @returns {WebResource}
- * @memberof CredentialPolicy
- */
- CredentialPolicy.prototype.signRequest = function (request) {
- // Child classes must override this method with request signing. This method
- // will be executed in sendRequest().
- return request;
- };
- return CredentialPolicy;
-}(coreHttp.BaseRequestPolicy));
+var Mappers$5 = /*#__PURE__*/Object.freeze({
+ __proto__: null,
+ Block: Block,
+ BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders,
+ BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders,
+ BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders,
+ BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders,
+ BlockBlobUploadHeaders: BlockBlobUploadHeaders,
+ BlockList: BlockList,
+ BlockLookupList: BlockLookupList,
+ StorageError: StorageError
+});
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources
- * or for use with Shared Access Signatures (SAS).
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for
+ * license information.
*
- * @export
- * @class AnonymousCredentialPolicy
- * @extends {CredentialPolicy}
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is
+ * regenerated.
*/
-var AnonymousCredentialPolicy = /** @class */ (function (_super) {
- tslib.__extends(AnonymousCredentialPolicy, _super);
+/** Class representing a BlockBlob. */
+var BlockBlob = /** @class */ (function () {
/**
- * Creates an instance of AnonymousCredentialPolicy.
- * @param {RequestPolicy} nextPolicy
- * @param {RequestPolicyOptions} options
- * @memberof AnonymousCredentialPolicy
+ * Create a BlockBlob.
+ * @param {StorageClientContext} client Reference to the service client.
*/
- function AnonymousCredentialPolicy(nextPolicy, options) {
- return _super.call(this, nextPolicy, options) || this;
- }
- return AnonymousCredentialPolicy;
-}(CredentialPolicy));
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-/**
- * Credential is an abstract class for Azure Storage HTTP requests signing. This
- * class will host an credentialPolicyCreator factory which generates CredentialPolicy.
- *
- * @export
- * @abstract
- * @class Credential
- */
-var Credential = /** @class */ (function () {
- function Credential() {
+ function BlockBlob(client) {
+ this.client = client;
}
- /**
- * Creates a RequestPolicy object.
- *
- * @param {RequestPolicy} _nextPolicy
- * @param {RequestPolicyOptions} _options
- * @returns {RequestPolicy}
- * @memberof Credential
- */
- Credential.prototype.create = function (
- // tslint:disable-next-line:variable-name
- _nextPolicy,
- // tslint:disable-next-line:variable-name
- _options) {
- throw new Error("Method should be implemented in children classes.");
+ BlockBlob.prototype.upload = function (body, contentLength, options, callback) {
+ return this.client.sendOperationRequest({
+ body: body,
+ contentLength: contentLength,
+ options: options
+ }, uploadOperationSpec, callback);
};
- return Credential;
-}());
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * AnonymousCredential provides a credentialPolicyCreator member used to create
- * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with
- * HTTP(S) requests that read public resources or for use with Shared Access
- * Signatures (SAS).
- *
- * @export
- * @class AnonymousCredential
- * @extends {Credential}
- */
-var AnonymousCredential = /** @class */ (function (_super) {
- tslib.__extends(AnonymousCredential, _super);
- function AnonymousCredential() {
- return _super !== null && _super.apply(this, arguments) || this;
- }
- /**
- * Creates an {@link AnonymousCredentialPolicy} object.
- *
- * @param {RequestPolicy} nextPolicy
- * @param {RequestPolicyOptions} options
- * @returns {AnonymousCredentialPolicy}
- * @memberof AnonymousCredential
- */
- AnonymousCredential.prototype.create = function (nextPolicy, options) {
- return new AnonymousCredentialPolicy(nextPolicy, options);
+ BlockBlob.prototype.stageBlock = function (blockId, contentLength, body, options, callback) {
+ return this.client.sendOperationRequest({
+ blockId: blockId,
+ contentLength: contentLength,
+ body: body,
+ options: options
+ }, stageBlockOperationSpec, callback);
};
- return AnonymousCredential;
-}(Credential));
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * TelemetryPolicy is a policy used to tag user-agent header for every requests.
- *
- * @class TelemetryPolicy
- * @extends {BaseRequestPolicy}
- */
-var TelemetryPolicy = /** @class */ (function (_super) {
- tslib.__extends(TelemetryPolicy, _super);
- /**
- * Creates an instance of TelemetryPolicy.
- * @param {RequestPolicy} nextPolicy
- * @param {RequestPolicyOptions} options
- * @param {string} telemetry
- * @memberof TelemetryPolicy
- */
- function TelemetryPolicy(nextPolicy, options, telemetry) {
- var _this = _super.call(this, nextPolicy, options) || this;
- _this.telemetry = telemetry;
- return _this;
- }
- /**
- * Sends out request.
- *
- * @param {WebResource} request
- * @returns {Promise}
- * @memberof TelemetryPolicy
- */
- TelemetryPolicy.prototype.sendRequest = function (request) {
- return tslib.__awaiter(this, void 0, void 0, function () {
- return tslib.__generator(this, function (_a) {
- {
- if (!request.headers) {
- request.headers = new coreHttp.HttpHeaders();
- }
- if (!request.headers.get(HeaderConstants.USER_AGENT)) {
- request.headers.set(HeaderConstants.USER_AGENT, this.telemetry);
- }
- }
- return [2 /*return*/, this._nextPolicy.sendRequest(request)];
- });
- });
+ BlockBlob.prototype.stageBlockFromURL = function (blockId, contentLength, sourceUrl, options, callback) {
+ return this.client.sendOperationRequest({
+ blockId: blockId,
+ contentLength: contentLength,
+ sourceUrl: sourceUrl,
+ options: options
+ }, stageBlockFromURLOperationSpec, callback);
};
- return TelemetryPolicy;
-}(coreHttp.BaseRequestPolicy));
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects.
- *
- * @export
- * @class TelemetryPolicyFactory
- * @implements {RequestPolicyFactory}
- */
-var TelemetryPolicyFactory = /** @class */ (function () {
- /**
- * Creates an instance of TelemetryPolicyFactory.
- * @param {UserAgentOptions} [telemetry]
- * @memberof TelemetryPolicyFactory
- */
- function TelemetryPolicyFactory(telemetry) {
- var userAgentInfo = [];
- {
- if (telemetry) {
- // FIXME: replace() only replaces the first space. And we have no idea why we need to replace spaces in the first place.
- // But fixing this would be a breaking change. Logged an issue here: https://github.com/Azure/azure-sdk-for-js/issues/10793
- var telemetryString = (telemetry.userAgentPrefix || "").replace(" ", "");
- if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) {
- userAgentInfo.push(telemetryString);
- }
- }
- // e.g. azsdk-js-storageblob/10.0.0
- var libInfo = "azsdk-js-storageblob/" + SDK_VERSION;
- if (userAgentInfo.indexOf(libInfo) === -1) {
- userAgentInfo.push(libInfo);
+ BlockBlob.prototype.commitBlockList = function (blocks, options, callback) {
+ return this.client.sendOperationRequest({
+ blocks: blocks,
+ options: options
+ }, commitBlockListOperationSpec, callback);
+ };
+ BlockBlob.prototype.getBlockList = function (listType, options, callback) {
+ return this.client.sendOperationRequest({
+ listType: listType,
+ options: options
+ }, getBlockListOperationSpec, callback);
+ };
+ return BlockBlob;
+}());
+// Operation Specifications
+var serializer$5 = new coreHttp.Serializer(Mappers$5, true);
+var uploadOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds
+ ],
+ headerParameters: [
+ transactionalContentMD5,
+ contentLength,
+ metadata,
+ encryptionScope,
+ tier0,
+ version,
+ requestId,
+ blobTagsString,
+ blobType2,
+ blobContentType,
+ blobContentEncoding,
+ blobContentLanguage,
+ blobContentMD5,
+ blobCacheControl,
+ blobContentDisposition,
+ leaseId0,
+ encryptionKey,
+ encryptionKeySha256,
+ encryptionAlgorithm,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ requestBody: {
+ parameterPath: "body",
+ mapper: {
+ required: true,
+ serializedName: "body",
+ type: {
+ name: "Stream"
}
- // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299)
- var runtimeInfo = "(NODE-VERSION " + process.version + "; " + os.type() + " " + os.release() + ")";
- if (userAgentInfo.indexOf(runtimeInfo) === -1) {
- userAgentInfo.push(runtimeInfo);
+ }
+ },
+ contentType: "application/octet-stream",
+ responses: {
+ 201: {
+ headersMapper: BlockBlobUploadHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlockBlobUploadHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$5
+};
+var stageBlockOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ blockId,
+ timeoutInSeconds,
+ comp21
+ ],
+ headerParameters: [
+ contentLength,
+ transactionalContentMD5,
+ transactionalContentCrc64,
+ encryptionScope,
+ version,
+ requestId,
+ leaseId0,
+ encryptionKey,
+ encryptionKeySha256,
+ encryptionAlgorithm
+ ],
+ requestBody: {
+ parameterPath: "body",
+ mapper: {
+ required: true,
+ serializedName: "body",
+ type: {
+ name: "Stream"
}
}
- this.telemetryString = userAgentInfo.join(" ");
- }
- /**
- * Creates a TelemetryPolicy object.
- *
- * @param {RequestPolicy} nextPolicy
- * @param {RequestPolicyOptions} options
- * @returns {TelemetryPolicy}
- * @memberof TelemetryPolicyFactory
- */
- TelemetryPolicyFactory.prototype.create = function (nextPolicy, options) {
- return new TelemetryPolicy(nextPolicy, options, this.telemetryString);
- };
- return TelemetryPolicyFactory;
-}());
+ },
+ contentType: "application/octet-stream",
+ responses: {
+ 201: {
+ headersMapper: BlockBlobStageBlockHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlockBlobStageBlockHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$5
+};
+var stageBlockFromURLOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ blockId,
+ timeoutInSeconds,
+ comp21
+ ],
+ headerParameters: [
+ contentLength,
+ sourceUrl,
+ sourceRange1,
+ sourceContentMD5,
+ sourceContentCrc64,
+ encryptionScope,
+ version,
+ requestId,
+ encryptionKey,
+ encryptionKeySha256,
+ encryptionAlgorithm,
+ leaseId0,
+ sourceIfModifiedSince,
+ sourceIfUnmodifiedSince,
+ sourceIfMatch,
+ sourceIfNoneMatch
+ ],
+ responses: {
+ 201: {
+ headersMapper: BlockBlobStageBlockFromURLHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlockBlobStageBlockFromURLHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$5
+};
+var commitBlockListOperationSpec = {
+ httpMethod: "PUT",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ timeoutInSeconds,
+ comp22
+ ],
+ headerParameters: [
+ transactionalContentMD5,
+ transactionalContentCrc64,
+ metadata,
+ encryptionScope,
+ tier0,
+ version,
+ requestId,
+ blobTagsString,
+ blobCacheControl,
+ blobContentType,
+ blobContentEncoding,
+ blobContentLanguage,
+ blobContentMD5,
+ blobContentDisposition,
+ leaseId0,
+ encryptionKey,
+ encryptionKeySha256,
+ encryptionAlgorithm,
+ ifModifiedSince,
+ ifUnmodifiedSince,
+ ifMatch,
+ ifNoneMatch,
+ ifTags
+ ],
+ requestBody: {
+ parameterPath: "blocks",
+ mapper: tslib.__assign(tslib.__assign({}, BlockLookupList), { required: true })
+ },
+ contentType: "application/xml; charset=utf-8",
+ responses: {
+ 201: {
+ headersMapper: BlockBlobCommitBlockListHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlockBlobCommitBlockListHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$5
+};
+var getBlockListOperationSpec = {
+ httpMethod: "GET",
+ path: "{containerName}/{blob}",
+ urlParameters: [
+ url
+ ],
+ queryParameters: [
+ snapshot,
+ listType,
+ timeoutInSeconds,
+ comp22
+ ],
+ headerParameters: [
+ version,
+ requestId,
+ leaseId0,
+ ifTags
+ ],
+ responses: {
+ 200: {
+ bodyMapper: BlockList,
+ headersMapper: BlockBlobGetBlockListHeaders
+ },
+ default: {
+ bodyMapper: StorageError,
+ headersMapper: BlockBlobGetBlockListHeaders
+ }
+ },
+ isXML: true,
+ serializer: serializer$5
+};
// Copyright (c) Microsoft Corporation.
-var _defaultHttpClient = new coreHttp.DefaultHttpClient();
-function getCachedDefaultHttpClient() {
- return _defaultHttpClient;
-}
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * A Pipeline class containing HTTP request policies.
- * You can create a default Pipeline by calling {@link newPipeline}.
- * Or you can create a Pipeline with your own policies by the constructor of Pipeline.
- *
- * Refer to {@link newPipeline} and provided policies before implementing your
- * customized Pipeline.
- *
- * @export
- * @class Pipeline
- */
-var Pipeline = /** @class */ (function () {
- /**
- * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface.
- *
- * @param {RequestPolicyFactory[]} factories
- * @param {PipelineOptions} [options={}]
- * @memberof Pipeline
- */
- function Pipeline(factories, options) {
- if (options === void 0) { options = {}; }
- this.factories = factories;
- // when options.httpClient is not specified, passing in a DefaultHttpClient instance to
- // avoid each client creating its own http client.
- this.options = tslib.__assign(tslib.__assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() });
- }
- /**
- * Transfer Pipeline object to ServiceClientOptions object which is required by
- * ServiceClient constructor.
- *
- * @returns {ServiceClientOptions} The ServiceClientOptions object from this Pipeline.
- * @memberof Pipeline
- */
- Pipeline.prototype.toServiceClientOptions = function () {
- return {
- httpClient: this.options.httpClient,
- requestPolicyFactories: this.factories
- };
- };
- return Pipeline;
-}());
/**
- * Creates a new Pipeline object with Credential provided.
- *
- * @export
- * @param {StorageSharedKeyCredential | AnonymousCredential | TokenCredential} credential Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the @azure/identity package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.
- * @param {StoragePipelineOptions} [pipelineOptions] Optional. Options.
- * @returns {Pipeline} A new Pipeline object.
+ * The @azure/logger configuration for this package.
*/
-function newPipeline(credential, pipelineOptions) {
- if (pipelineOptions === void 0) { pipelineOptions = {}; }
- if (credential === undefined) {
- credential = new AnonymousCredential();
- }
- // Order is important. Closer to the API at the top & closer to the network at the bottom.
- // The credential's policy factory must appear close to the wire so it can sign any
- // changes made by other factories (like UniqueRequestIDPolicyFactory)
- var telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions);
- var factories = [
- coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }),
- coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions),
- telemetryPolicy,
- coreHttp.generateClientRequestIdPolicy(),
- new StorageBrowserPolicyFactory(),
- coreHttp.deserializationPolicy(),
- new StorageRetryPolicyFactory(pipelineOptions.retryOptions),
- coreHttp.logPolicy({
- logger: logger.info,
- allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames,
- allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters
- })
- ];
- {
- // policies only available in Node.js runtime, not in browsers
- factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions));
- factories.push(coreHttp.disableResponseDecompressionPolicy());
- }
- factories.push(coreHttp.isTokenCredential(credential)
- ? coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes)
- : credential);
- return new Pipeline(factories, pipelineOptions);
-}
+var logger = logger$1.createClientLogger("storage-blob");
// Copyright (c) Microsoft Corporation. All rights reserved.
-var ABORT_ERROR = new abortController.AbortError("The operation was aborted.");
+// Licensed under the MIT License.
+var SDK_VERSION = "12.2.1";
+var SERVICE_VERSION = "2019-12-12";
+var BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB
+var BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB
+var BLOCK_BLOB_MAX_BLOCKS = 50000;
+var DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB
+var DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB
+var DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5;
/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- *
- * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends.
- *
- * @class RetriableReadableStream
- * @extends {Readable}
+ * The OAuth scope to use with Azure Storage.
*/
-var RetriableReadableStream = /** @class */ (function (_super) {
- tslib.__extends(RetriableReadableStream, _super);
- /**
- * Creates an instance of RetriableReadableStream.
- *
- * @param {NodeJS.ReadableStream} source The current ReadableStream returned from getter
- * @param {ReadableStreamGetter} getter A method calling downloading request returning
- * a new ReadableStream from specified offset
- * @param {number} offset Offset position in original data source to read
- * @param {number} count How much data in original data source to read
- * @param {RetriableReadableStreamOptions} [options={}]
- * @memberof RetriableReadableStream
- */
- function RetriableReadableStream(source, getter, offset, count, options) {
- if (options === void 0) { options = {}; }
- var _this = _super.call(this) || this;
- _this.retries = 0;
- _this.abortHandler = function () {
- _this.source.pause();
- _this.emit("error", ABORT_ERROR);
- };
- _this.aborter = options.abortSignal || abortController.AbortSignal.none;
- _this.getter = getter;
- _this.source = source;
- _this.start = offset;
- _this.offset = offset;
- _this.end = offset + count - 1;
- _this.maxRetryRequests =
- options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0;
- _this.onProgress = options.onProgress;
- _this.options = options;
- _this.aborter.addEventListener("abort", _this.abortHandler);
- _this.setSourceDataHandler();
- _this.setSourceEndHandler();
- _this.setSourceErrorHandler();
- return _this;
+var StorageOAuthScopes = "https://storage.azure.com/.default";
+var URLConstants = {
+ Parameters: {
+ FORCE_BROWSER_NO_CACHE: "_",
+ SIGNATURE: "sig",
+ SNAPSHOT: "snapshot",
+ VERSIONID: "versionid",
+ TIMEOUT: "timeout"
}
- RetriableReadableStream.prototype._read = function () {
- if (!this.aborter.aborted) {
- this.source.resume();
- }
- };
- RetriableReadableStream.prototype.setSourceDataHandler = function () {
- var _this = this;
- this.source.on("data", function (data) {
- if (_this.options.doInjectErrorOnce) {
- _this.options.doInjectErrorOnce = undefined;
- _this.source.pause();
- _this.source.removeAllListeners("data");
- _this.source.emit("end");
- return;
- }
- // console.log(
- // `Offset: ${this.offset}, Received ${data.length} from internal stream`
- // );
- _this.offset += data.length;
- if (_this.onProgress) {
- _this.onProgress({ loadedBytes: _this.offset - _this.start });
- }
- if (!_this.push(data)) {
- _this.source.pause();
- }
- });
- };
- RetriableReadableStream.prototype.setSourceEndHandler = function () {
- var _this = this;
- this.source.on("end", function () {
- // console.log(
- // `Source stream emits end, offset: ${
- // this.offset
- // }, dest end : ${this.end}`
- // );
- if (_this.offset - 1 === _this.end) {
- _this.aborter.removeEventListener("abort", _this.abortHandler);
- _this.push(null);
- }
- else if (_this.offset <= _this.end) {
- // console.log(
- // `retries: ${this.retries}, max retries: ${this.maxRetries}`
- // );
- if (_this.retries < _this.maxRetryRequests) {
- _this.retries += 1;
- _this.getter(_this.offset)
- .then(function (newSource) {
- _this.source = newSource;
- _this.setSourceDataHandler();
- _this.setSourceEndHandler();
- _this.setSourceErrorHandler();
- })
- .catch(function (error) {
- _this.emit("error", error);
- });
- }
- else {
- _this.emit("error", new Error(
- // tslint:disable-next-line:max-line-length
- "Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: " + (_this
- .offset - 1) + ", data needed offset: " + _this.end + ", retries: " + _this.retries + ", max retries: " + _this.maxRetryRequests));
- }
- }
- else {
- _this.emit("error", new Error("Data corruption failure: Received more data than original request, data needed offset is " + _this.end + ", received offset: " + (_this.offset - 1)));
- }
- });
- };
- RetriableReadableStream.prototype.setSourceErrorHandler = function () {
- var _this = this;
- this.source.on("error", function (error) {
- _this.emit("error", error);
- });
- };
- return RetriableReadableStream;
-}(stream.Readable));
+};
+var HTTPURLConnection = {
+ HTTP_ACCEPTED: 202,
+ HTTP_CONFLICT: 409,
+ HTTP_NOT_FOUND: 404,
+ HTTP_PRECON_FAILED: 412,
+ HTTP_RANGE_NOT_SATISFIABLE: 416
+};
+var HeaderConstants = {
+ AUTHORIZATION: "Authorization",
+ AUTHORIZATION_SCHEME: "Bearer",
+ CONTENT_ENCODING: "Content-Encoding",
+ CONTENT_ID: "Content-ID",
+ CONTENT_LANGUAGE: "Content-Language",
+ CONTENT_LENGTH: "Content-Length",
+ CONTENT_MD5: "Content-Md5",
+ CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding",
+ CONTENT_TYPE: "Content-Type",
+ COOKIE: "Cookie",
+ DATE: "date",
+ IF_MATCH: "if-match",
+ IF_MODIFIED_SINCE: "if-modified-since",
+ IF_NONE_MATCH: "if-none-match",
+ IF_UNMODIFIED_SINCE: "if-unmodified-since",
+ PREFIX_FOR_STORAGE: "x-ms-",
+ RANGE: "Range",
+ USER_AGENT: "User-Agent",
+ X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id",
+ X_MS_COPY_SOURCE: "x-ms-copy-source",
+ X_MS_DATE: "x-ms-date",
+ X_MS_ERROR_CODE: "x-ms-error-code",
+ X_MS_VERSION: "x-ms-version"
+};
+var ETagNone = "";
+var ETagAny = "*";
+var SIZE_1_MB = 1 * 1024 * 1024;
+var BATCH_MAX_REQUEST = 256;
+var BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB;
+var HTTP_LINE_ENDING = "\r\n";
+var HTTP_VERSION_1_1 = "HTTP/1.1";
+var EncryptionAlgorithmAES25 = "AES256";
+var DevelopmentConnectionString = "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;";
+var StorageBlobLoggingAllowedHeaderNames = [
+ "Access-Control-Allow-Origin",
+ "Cache-Control",
+ "Content-Length",
+ "Content-Type",
+ "Date",
+ "Request-Id",
+ "traceparent",
+ "Transfer-Encoding",
+ "User-Agent",
+ "x-ms-client-request-id",
+ "x-ms-date",
+ "x-ms-error-code",
+ "x-ms-request-id",
+ "x-ms-return-client-request-id",
+ "x-ms-version",
+ "Accept-Ranges",
+ "Content-Disposition",
+ "Content-Encoding",
+ "Content-Language",
+ "Content-MD5",
+ "Content-Range",
+ "ETag",
+ "Last-Modified",
+ "Server",
+ "Vary",
+ "x-ms-content-crc64",
+ "x-ms-copy-action",
+ "x-ms-copy-completion-time",
+ "x-ms-copy-id",
+ "x-ms-copy-progress",
+ "x-ms-copy-status",
+ "x-ms-has-immutability-policy",
+ "x-ms-has-legal-hold",
+ "x-ms-lease-state",
+ "x-ms-lease-status",
+ "x-ms-range",
+ "x-ms-request-server-encrypted",
+ "x-ms-server-encrypted",
+ "x-ms-snapshot",
+ "x-ms-source-range",
+ "If-Match",
+ "If-Modified-Since",
+ "If-None-Match",
+ "If-Unmodified-Since",
+ "x-ms-access-tier",
+ "x-ms-access-tier-change-time",
+ "x-ms-access-tier-inferred",
+ "x-ms-account-kind",
+ "x-ms-archive-status",
+ "x-ms-blob-append-offset",
+ "x-ms-blob-cache-control",
+ "x-ms-blob-committed-block-count",
+ "x-ms-blob-condition-appendpos",
+ "x-ms-blob-condition-maxsize",
+ "x-ms-blob-content-disposition",
+ "x-ms-blob-content-encoding",
+ "x-ms-blob-content-language",
+ "x-ms-blob-content-length",
+ "x-ms-blob-content-md5",
+ "x-ms-blob-content-type",
+ "x-ms-blob-public-access",
+ "x-ms-blob-sequence-number",
+ "x-ms-blob-type",
+ "x-ms-copy-destination-snapshot",
+ "x-ms-creation-time",
+ "x-ms-default-encryption-scope",
+ "x-ms-delete-snapshots",
+ "x-ms-delete-type-permanent",
+ "x-ms-deny-encryption-scope-override",
+ "x-ms-encryption-algorithm",
+ "x-ms-if-sequence-number-eq",
+ "x-ms-if-sequence-number-le",
+ "x-ms-if-sequence-number-lt",
+ "x-ms-incremental-copy",
+ "x-ms-lease-action",
+ "x-ms-lease-break-period",
+ "x-ms-lease-duration",
+ "x-ms-lease-id",
+ "x-ms-lease-time",
+ "x-ms-page-write",
+ "x-ms-proposed-lease-id",
+ "x-ms-range-get-content-md5",
+ "x-ms-rehydrate-priority",
+ "x-ms-sequence-number-action",
+ "x-ms-sku-name",
+ "x-ms-source-content-md5",
+ "x-ms-source-if-match",
+ "x-ms-source-if-modified-since",
+ "x-ms-source-if-none-match",
+ "x-ms-source-if-unmodified-since",
+ "x-ms-tag-count",
+ "x-ms-encryption-key-sha256",
+ "x-ms-if-tags",
+ "x-ms-source-if-tags"
+];
+var StorageBlobLoggingAllowedQueryParameters = [
+ "comp",
+ "maxresults",
+ "rscc",
+ "rscd",
+ "rsce",
+ "rscl",
+ "rsct",
+ "se",
+ "si",
+ "sip",
+ "sp",
+ "spr",
+ "sr",
+ "srt",
+ "ss",
+ "st",
+ "sv",
+ "include",
+ "marker",
+ "prefix",
+ "copyid",
+ "restype",
+ "blockid",
+ "blocklisttype",
+ "delimiter",
+ "prevsnapshot",
+ "ske",
+ "skoid",
+ "sks",
+ "skt",
+ "sktid",
+ "skv",
+ "snapshot"
+];
// Copyright (c) Microsoft Corporation. All rights reserved.
/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ * Reserved URL characters must be properly escaped for Storage services like Blob or File.
*
- * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will
- * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot
- * trigger retries defined in pipeline retry policy.)
+ * ## URL encode and escape strategy for JS SDKs
*
- * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js
- * Readable stream.
+ * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not.
+ * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL
+ * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors.
+ *
+ * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK.
+ *
+ * This is what legacy V2 SDK does, simple and works for most of the cases.
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b:",
+ * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created.
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A",
+ * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created.
+ *
+ * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is
+ * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name.
+ * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created.
+ * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it.
+ * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two:
+ *
+ * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters.
+ *
+ * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped.
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b:",
+ * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created.
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A",
+ * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created.
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A",
+ * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created.
+ *
+ * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string
+ * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL.
+ * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample.
+ * And following URL strings are invalid:
+ * - "http://account.blob.core.windows.net/con/b%"
+ * - "http://account.blob.core.windows.net/con/b%2"
+ * - "http://account.blob.core.windows.net/con/b%G"
+ *
+ * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string.
+ *
+ * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)`
+ *
+ * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata
*
* @export
- * @class BlobDownloadResponse
- * @implements {BlobDownloadResponseParsed}
+ * @param {string} url
+ * @returns {string}
*/
-var BlobDownloadResponse = /** @class */ (function () {
- /**
- * Creates an instance of BlobDownloadResponse.
- *
- * @param {BlobDownloadResponseParsed} originalResponse
- * @param {ReadableStreamGetter} getter
- * @param {number} offset
- * @param {number} count
- * @param {RetriableReadableStreamOptions} [options={}]
- * @memberof BlobDownloadResponse
- */
- function BlobDownloadResponse(originalResponse, getter, offset, count, options) {
- if (options === void 0) { options = {}; }
- this.originalResponse = originalResponse;
- this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options);
+function escapeURLPath(url) {
+ var urlParsed = coreHttp.URLBuilder.parse(url);
+ var path = urlParsed.getPath();
+ path = path || "/";
+ path = escape(path);
+ urlParsed.setPath(path);
+ return urlParsed.toString();
+}
+function getProxyUriFromDevConnString(connectionString) {
+ // Development Connection String
+ // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key
+ var proxyUri = "";
+ if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) {
+ // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri
+ var matchCredentials = connectionString.split(";");
+ for (var _i = 0, matchCredentials_1 = matchCredentials; _i < matchCredentials_1.length; _i++) {
+ var element = matchCredentials_1[_i];
+ if (element.trim().startsWith("DevelopmentStorageProxyUri=")) {
+ proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1];
+ }
+ }
}
- Object.defineProperty(BlobDownloadResponse.prototype, "acceptRanges", {
- /**
- * Indicates that the service supports
- * requests for partial file content.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.acceptRanges;
- },
- enumerable: false,
- configurable: true
+ return proxyUri;
+}
+function getValueInConnString(connectionString, argument) {
+ var elements = connectionString.split(";");
+ for (var _i = 0, elements_1 = elements; _i < elements_1.length; _i++) {
+ var element = elements_1[_i];
+ if (element.trim().startsWith(argument)) {
+ return element.trim().match(argument + "=(.*)")[1];
+ }
+ }
+ return "";
+}
+/**
+ * Extracts the parts of an Azure Storage account connection string.
+ *
+ * @export
+ * @param {string} connectionString Connection string.
+ * @returns {ConnectionString} String key value pairs of the storage account's url and credentials.
+ */
+function extractConnectionStringParts(connectionString) {
+ var proxyUri = "";
+ if (connectionString.startsWith("UseDevelopmentStorage=true")) {
+ // Development connection string
+ proxyUri = getProxyUriFromDevConnString(connectionString);
+ connectionString = DevelopmentConnectionString;
+ }
+ // Matching BlobEndpoint in the Account connection string
+ var blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint");
+ // Slicing off '/' at the end if exists
+ // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end)
+ blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint;
+ if (connectionString.search("DefaultEndpointsProtocol=") !== -1 &&
+ connectionString.search("AccountKey=") !== -1) {
+ // Account connection string
+ var defaultEndpointsProtocol = "";
+ var accountName = "";
+ var accountKey = Buffer.from("accountKey", "base64");
+ var endpointSuffix = "";
+ // Get account name and key
+ accountName = getValueInConnString(connectionString, "AccountName");
+ accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64");
+ if (!blobEndpoint) {
+ // BlobEndpoint is not present in the Account connection string
+ // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`
+ defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol");
+ var protocol = defaultEndpointsProtocol.toLowerCase();
+ if (protocol !== "https" && protocol !== "http") {
+ throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'");
+ }
+ endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix");
+ if (!endpointSuffix) {
+ throw new Error("Invalid EndpointSuffix in the provided Connection String");
+ }
+ blobEndpoint = defaultEndpointsProtocol + "://" + accountName + ".blob." + endpointSuffix;
+ }
+ if (!accountName) {
+ throw new Error("Invalid AccountName in the provided Connection String");
+ }
+ else if (accountKey.length === 0) {
+ throw new Error("Invalid AccountKey in the provided Connection String");
+ }
+ return {
+ kind: "AccountConnString",
+ url: blobEndpoint,
+ accountName: accountName,
+ accountKey: accountKey,
+ proxyUri: proxyUri
+ };
+ }
+ else {
+ // SAS connection string
+ var accountSas = getValueInConnString(connectionString, "SharedAccessSignature");
+ var accountName = getAccountNameFromUrl(blobEndpoint);
+ if (!blobEndpoint) {
+ throw new Error("Invalid BlobEndpoint in the provided SAS Connection String");
+ }
+ else if (!accountSas) {
+ throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String");
+ }
+ return { kind: "SASConnString", url: blobEndpoint, accountName: accountName, accountSas: accountSas };
+ }
+}
+/**
+ * Internal escape method implemented Strategy Two mentioned in escapeURL() description.
+ *
+ * @param {string} text
+ * @returns {string}
+ */
+function escape(text) {
+ return encodeURIComponent(text)
+ .replace(/%2F/g, "/") // Don't escape for "/"
+ .replace(/'/g, "%27") // Escape for "'"
+ .replace(/\+/g, "%20")
+ .replace(/%25/g, "%"); // Revert encoded "%"
+}
+/**
+ * Append a string to URL path. Will remove duplicated "/" in front of the string
+ * when URL path ends with a "/".
+ *
+ * @export
+ * @param {string} url Source URL string
+ * @param {string} name String to be appended to URL
+ * @returns {string} An updated URL string
+ */
+function appendToURLPath(url, name) {
+ var urlParsed = coreHttp.URLBuilder.parse(url);
+ var path = urlParsed.getPath();
+ path = path ? (path.endsWith("/") ? "" + path + name : path + "/" + name) : name;
+ urlParsed.setPath(path);
+ return urlParsed.toString();
+}
+/**
+ * Set URL parameter name and value. If name exists in URL parameters, old value
+ * will be replaced by name key. If not provide value, the parameter will be deleted.
+ *
+ * @export
+ * @param {string} url Source URL string
+ * @param {string} name Parameter name
+ * @param {string} [value] Parameter value
+ * @returns {string} An updated URL string
+ */
+function setURLParameter(url, name, value) {
+ var urlParsed = coreHttp.URLBuilder.parse(url);
+ urlParsed.setQueryParameter(name, value);
+ return urlParsed.toString();
+}
+/**
+ * Set URL host.
+ *
+ * @export
+ * @param {string} url Source URL string
+ * @param {string} host New host string
+ * @returns An updated URL string
+ */
+function setURLHost(url, host) {
+ var urlParsed = coreHttp.URLBuilder.parse(url);
+ urlParsed.setHost(host);
+ return urlParsed.toString();
+}
+/**
+ * Get URL path from an URL string.
+ *
+ * @export
+ * @param {string} url Source URL string
+ * @returns {(string | undefined)}
+ */
+function getURLPath(url) {
+ var urlParsed = coreHttp.URLBuilder.parse(url);
+ return urlParsed.getPath();
+}
+/**
+ * Get URL scheme from an URL string.
+ *
+ * @export
+ * @param {string} url Source URL string
+ * @returns {(string | undefined)}
+ */
+function getURLScheme(url) {
+ var urlParsed = coreHttp.URLBuilder.parse(url);
+ return urlParsed.getScheme();
+}
+/**
+ * Get URL path and query from an URL string.
+ *
+ * @export
+ * @param {string} url Source URL string
+ * @returns {(string | undefined)}
+ */
+function getURLPathAndQuery(url) {
+ var urlParsed = coreHttp.URLBuilder.parse(url);
+ var pathString = urlParsed.getPath();
+ if (!pathString) {
+ throw new RangeError("Invalid url without valid path.");
+ }
+ var queryString = urlParsed.getQuery() || "";
+ queryString = queryString.trim();
+ if (queryString != "") {
+ queryString = queryString.startsWith("?") ? queryString : "?" + queryString; // Ensure query string start with '?'
+ }
+ return "" + pathString + queryString;
+}
+/**
+ * Get URL query key value pairs from an URL string.
+ *
+ * @export
+ * @param {string} url
+ * @returns {{[key: string]: string}}
+ */
+function getURLQueries(url) {
+ var queryString = coreHttp.URLBuilder.parse(url).getQuery();
+ if (!queryString) {
+ return {};
+ }
+ queryString = queryString.trim();
+ queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString;
+ var querySubStrings = queryString.split("&");
+ querySubStrings = querySubStrings.filter(function (value) {
+ var indexOfEqual = value.indexOf("=");
+ var lastIndexOfEqual = value.lastIndexOf("=");
+ return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1);
});
- Object.defineProperty(BlobDownloadResponse.prototype, "cacheControl", {
- /**
- * Returns if it was previously specified
- * for the file.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.cacheControl;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "contentDisposition", {
- /**
- * Returns the value that was specified
- * for the 'x-ms-content-disposition' header and specifies how to process the
- * response.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.contentDisposition;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "contentEncoding", {
- /**
- * Returns the value that was specified
- * for the Content-Encoding request header.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.contentEncoding;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "contentLanguage", {
- /**
- * Returns the value that was specified
- * for the Content-Language request header.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.contentLanguage;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "blobSequenceNumber", {
- /**
- * The current sequence number for a
- * page blob. This header is not returned for block blobs or append blobs.
- *
- * @readonly
- * @type {(number | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.blobSequenceNumber;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "blobType", {
- /**
- * The blob's type. Possible values include:
- * 'BlockBlob', 'PageBlob', 'AppendBlob'.
- *
- * @readonly
- * @type {(BlobType | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.blobType;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "contentLength", {
- /**
- * The number of bytes present in the
- * response body.
- *
- * @readonly
- * @type {(number | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.contentLength;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "contentMD5", {
- /**
- * If the file has an MD5 hash and the
- * request is to read the full file, this response header is returned so that
- * the client can check for message content integrity. If the request is to
- * read a specified range and the 'x-ms-range-get-content-md5' is set to
- * true, then the request returns an MD5 hash for the range, as long as the
- * range size is less than or equal to 4 MB. If neither of these sets of
- * conditions is true, then no value is returned for the 'Content-MD5'
- * header.
- *
- * @readonly
- * @type {(Uint8Array | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.contentMD5;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "contentRange", {
- /**
- * Indicates the range of bytes returned if
- * the client requested a subset of the file by setting the Range request
- * header.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.contentRange;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "contentType", {
- /**
- * The content type specified for the file.
- * The default content type is 'application/octet-stream'
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.contentType;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "copyCompletedOn", {
- /**
- * Conclusion time of the last attempted
- * Copy File operation where this file was the destination file. This value
- * can specify the time of a completed, aborted, or failed copy attempt.
- *
- * @readonly
- * @type {(Date | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.copyCompletedOn;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "copyId", {
- /**
- * String identifier for the last attempted Copy
- * File operation where this file was the destination file.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.copyId;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "copyProgress", {
- /**
- * Contains the number of bytes copied and
- * the total bytes in the source in the last attempted Copy File operation
- * where this file was the destination file. Can show between 0 and
- * Content-Length bytes copied.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.copyProgress;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "copySource", {
- /**
- * URL up to 2KB in length that specifies the
- * source file used in the last attempted Copy File operation where this file
- * was the destination file.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.copySource;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "copyStatus", {
- /**
- * State of the copy operation
- * identified by 'x-ms-copy-id'. Possible values include: 'pending',
- * 'success', 'aborted', 'failed'
- *
- * @readonly
- * @type {(CopyStatusType | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.copyStatus;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "copyStatusDescription", {
- /**
- * Only appears when
- * x-ms-copy-status is failed or pending. Describes cause of fatal or
- * non-fatal copy operation failure.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.copyStatusDescription;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "leaseDuration", {
- /**
- * When a blob is leased,
- * specifies whether the lease is of infinite or fixed duration. Possible
- * values include: 'infinite', 'fixed'.
- *
- * @readonly
- * @type {(LeaseDurationType | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.leaseDuration;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "leaseState", {
- /**
- * Lease state of the blob. Possible
- * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.
- *
- * @readonly
- * @type {(LeaseStateType | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.leaseState;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "leaseStatus", {
- /**
- * The current lease status of the
- * blob. Possible values include: 'locked', 'unlocked'.
- *
- * @readonly
- * @type {(LeaseStatusType | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.leaseStatus;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "date", {
- /**
- * A UTC date/time value generated by the service that
- * indicates the time at which the response was initiated.
- *
- * @readonly
- * @type {(Date | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.date;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "blobCommittedBlockCount", {
- /**
- * The number of committed blocks
- * present in the blob. This header is returned only for append blobs.
- *
- * @readonly
- * @type {(number | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.blobCommittedBlockCount;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "etag", {
- /**
- * The ETag contains a value that you can use to
- * perform operations conditionally, in quotes.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.etag;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "tagCount", {
- /**
- * The number of tags associated with the blob
- *
- * @readonly
- * @type {(number | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.tagCount;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "errorCode", {
- /**
- * The error code.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.errorCode;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "isServerEncrypted", {
- /**
- * The value of this header is set to
- * true if the file data and application metadata are completely encrypted
- * using the specified algorithm. Otherwise, the value is set to false (when
- * the file is unencrypted, or if only parts of the file/application metadata
- * are encrypted).
- *
- * @readonly
- * @type {(boolean | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.isServerEncrypted;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "blobContentMD5", {
- /**
- * If the blob has a MD5 hash, and if
- * request contains range header (Range or x-ms-range), this response header
- * is returned with the value of the whole blob's MD5 value. This value may
- * or may not be equal to the value returned in Content-MD5 header, with the
- * latter calculated from the requested range.
- *
- * @readonly
- * @type {(Uint8Array | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.blobContentMD5;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "lastModified", {
- /**
- * Returns the date and time the file was last
- * modified. Any operation that modifies the file or its properties updates
- * the last modified time.
- *
- * @readonly
- * @type {(Date | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.lastModified;
- },
- enumerable: false,
- configurable: true
+ var queries = {};
+ for (var _i = 0, querySubStrings_1 = querySubStrings; _i < querySubStrings_1.length; _i++) {
+ var querySubString = querySubStrings_1[_i];
+ var splitResults = querySubString.split("=");
+ var key = splitResults[0];
+ var value = splitResults[1];
+ queries[key] = value;
+ }
+ return queries;
+}
+/**
+ * Rounds a date off to seconds.
+ *
+ * @export
+ * @param {Date} date
+ * @param {boolean} [withMilliseconds=true] If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned;
+ * If false, YYYY-MM-DDThh:mm:ssZ will be returned.
+ * @returns {string} Date string in ISO8061 format, with or without 7 milliseconds component
+ */
+function truncatedISO8061Date(date, withMilliseconds) {
+ if (withMilliseconds === void 0) { withMilliseconds = true; }
+ // Date.toISOString() will return like "2018-10-29T06:34:36.139Z"
+ var dateString = date.toISOString();
+ return withMilliseconds
+ ? dateString.substring(0, dateString.length - 1) + "0000" + "Z"
+ : dateString.substring(0, dateString.length - 5) + "Z";
+}
+/**
+ * Base64 encode.
+ *
+ * @export
+ * @param {string} content
+ * @returns {string}
+ */
+function base64encode(content) {
+ return !coreHttp.isNode ? btoa(content) : Buffer.from(content).toString("base64");
+}
+/**
+ * Generate a 64 bytes base64 block ID string.
+ *
+ * @export
+ * @param {number} blockIndex
+ * @returns {string}
+ */
+function generateBlockID(blockIDPrefix, blockIndex) {
+ // To generate a 64 bytes base64 string, source string should be 48
+ var maxSourceStringLength = 48;
+ // A blob can have a maximum of 100,000 uncommitted blocks at any given time
+ var maxBlockIndexLength = 6;
+ var maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength;
+ if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) {
+ blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength);
+ }
+ var res = blockIDPrefix +
+ padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0");
+ return base64encode(res);
+}
+/**
+ * Delay specified time interval.
+ *
+ * @export
+ * @param {number} timeInMs
+ * @param {AbortSignalLike} [aborter]
+ * @param {Error} [abortError]
+ */
+function delay(timeInMs, aborter, abortError) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ return tslib.__generator(this, function (_a) {
+ return [2 /*return*/, new Promise(function (resolve, reject) {
+ var timeout;
+ var abortHandler = function () {
+ if (timeout !== undefined) {
+ clearTimeout(timeout);
+ }
+ reject(abortError);
+ };
+ var resolveHandler = function () {
+ if (aborter !== undefined) {
+ aborter.removeEventListener("abort", abortHandler);
+ }
+ resolve();
+ };
+ timeout = setTimeout(resolveHandler, timeInMs);
+ if (aborter !== undefined) {
+ aborter.addEventListener("abort", abortHandler);
+ }
+ })];
+ });
});
- Object.defineProperty(BlobDownloadResponse.prototype, "metadata", {
- /**
- * A name-value pair
- * to associate with a file storage object.
- *
- * @readonly
- * @type {(Metadata | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.metadata;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "requestId", {
- /**
- * This header uniquely identifies the request
- * that was made and can be used for troubleshooting the request.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.requestId;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "clientRequestId", {
- /**
- * If a client request id header is sent in the request, this header will be present in the
- * response with the same value.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.clientRequestId;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "version", {
- /**
- * Indicates the version of the Blob service used
- * to execute the request.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.version;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "versionId", {
- /**
- * Indicates the versionId of the downloaded blob version.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.versionId;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "encryptionKeySha256", {
- /**
- * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned
- * when the blob was encrypted with a customer-provided key.
- *
- * @readonly
- * @type {(string | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.encryptionKeySha256;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "contentCrc64", {
- /**
- * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to
- * true, then the request returns a crc64 for the range, as long as the range size is less than
- * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is
- * specified in the same request, it will fail with 400(Bad Request)
- *
- * @type {(Uint8Array | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.contentCrc64;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "objectReplicationDestinationPolicyId", {
- /**
- * Object Replication Policy Id of the destination blob.
- *
- * @readonly
- * @type {(string| undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.objectReplicationDestinationPolicyId;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "objectReplicationSourceProperties", {
- /**
- * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.
- *
- * @readonly
- * @type {(ObjectReplicationPolicy[] | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.objectReplicationSourceProperties;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "isSealed", {
- /**
- * If this blob has been sealed.
- *
- * @readonly
- * @type {(boolean | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.isSealed;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "contentAsBlob", {
- /**
- * The response body as a browser Blob.
- * Always undefined in node.js.
- *
- * @readonly
- * @type {(Promise | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse.blobBody;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "readableStreamBody", {
- /**
- * The response body as a node.js Readable stream.
- * Always undefined in the browser.
- *
- * It will automatically retry when internal read stream unexpected ends.
- *
- * @readonly
- * @type {(NodeJS.ReadableStream | undefined)}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return coreHttp.isNode ? this.blobDownloadStream : undefined;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobDownloadResponse.prototype, "_response", {
- /**
- * The HTTP response.
- *
- * @type {HttpResponse}
- * @memberof BlobDownloadResponse
- */
- get: function () {
- return this.originalResponse._response;
- },
- enumerable: false,
- configurable: true
- });
- return BlobDownloadResponse;
-}());
-
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-var AVRO_SYNC_MARKER_SIZE = 16;
-var AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]);
-var AVRO_CODEC_KEY = "avro.codec";
-var AVRO_SCHEMA_KEY = "avro.schema";
-
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-function arraysEqual(a, b) {
- if (a === b)
- return true;
- if (a == null || b == null)
- return false;
- if (a.length != b.length)
+}
+/**
+ * String.prototype.padStart()
+ *
+ * @export
+ * @param {string} currentString
+ * @param {number} targetLength
+ * @param {string} [padString=" "]
+ * @returns {string}
+ */
+function padStart(currentString, targetLength, padString) {
+ if (padString === void 0) { padString = " "; }
+ // TS doesn't know this code needs to run downlevel sometimes.
+ // @ts-expect-error
+ if (String.prototype.padStart) {
+ return currentString.padStart(targetLength, padString);
+ }
+ padString = padString || " ";
+ if (currentString.length > targetLength) {
+ return currentString;
+ }
+ else {
+ targetLength = targetLength - currentString.length;
+ if (targetLength > padString.length) {
+ padString += padString.repeat(targetLength / padString.length);
+ }
+ return padString.slice(0, targetLength) + currentString;
+ }
+}
+/**
+ * If two strings are equal when compared case insensitive.
+ *
+ * @export
+ * @param {string} str1
+ * @param {string} str2
+ * @returns {boolean}
+ */
+function iEqual(str1, str2) {
+ return str1.toLocaleLowerCase() === str2.toLocaleLowerCase();
+}
+/**
+ * Extracts account name from the url
+ * @param {string} url url to extract the account name from
+ * @returns {string} with the account name
+ */
+function getAccountNameFromUrl(url) {
+ var parsedUrl = coreHttp.URLBuilder.parse(url);
+ var accountName;
+ try {
+ if (parsedUrl.getHost().split(".")[1] === "blob") {
+ // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;
+ accountName = parsedUrl.getHost().split(".")[0];
+ }
+ else if (isIpEndpointStyle(parsedUrl)) {
+ // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/
+ // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/
+ // .getPath() -> /devstoreaccount1/
+ accountName = parsedUrl.getPath().split("/")[1];
+ }
+ else {
+ // Custom domain case: "https://customdomain.com/containername/blob".
+ accountName = "";
+ }
+ return accountName;
+ }
+ catch (error) {
+ throw new Error("Unable to extract accountName with provided information.");
+ }
+}
+function isIpEndpointStyle(parsedUrl) {
+ if (parsedUrl.getHost() == undefined) {
return false;
- for (var i = 0; i < a.length; ++i) {
- if (a[i] !== b[i])
- return false;
}
- return true;
+ var host = parsedUrl.getHost() + (parsedUrl.getPort() == undefined ? "" : ":" + parsedUrl.getPort());
+ // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'.
+ // Case 2: localhost(:port), use broad regex to match port part.
+ // Case 3: Ipv4, use broad regex which just check if host contains Ipv4.
+ // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html.
+ return /^.*:.*:.*$|^localhost(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host);
+}
+/**
+ * Convert Tags to encoded string.
+ *
+ * @export
+ * @param {Tags} tags
+ * @returns {string | undefined}
+ */
+function toBlobTagsString(tags) {
+ if (tags === undefined) {
+ return undefined;
+ }
+ var tagPairs = [];
+ for (var key in tags) {
+ if (tags.hasOwnProperty(key)) {
+ var value = tags[key];
+ tagPairs.push(encodeURIComponent(key) + "=" + encodeURIComponent(value));
+ }
+ }
+ return tagPairs.join("&");
+}
+/**
+ * Convert Tags type to BlobTags.
+ *
+ * @export
+ * @param {Tags} [tags]
+ * @returns {(BlobTags | undefined)}
+ */
+function toBlobTags(tags) {
+ if (tags === undefined) {
+ return undefined;
+ }
+ var res = {
+ blobTagSet: []
+ };
+ for (var key in tags) {
+ if (tags.hasOwnProperty(key)) {
+ var value = tags[key];
+ res.blobTagSet.push({
+ key: key,
+ value: value
+ });
+ }
+ }
+ return res;
+}
+/**
+ * Covert BlobTags to Tags type.
+ *
+ * @export
+ * @param {BlobTags} [tags]
+ * @returns {(Tags | undefined)}
+ */
+function toTags(tags) {
+ if (tags === undefined) {
+ return undefined;
+ }
+ var res = {};
+ for (var _i = 0, _a = tags.blobTagSet; _i < _a.length; _i++) {
+ var blobTag = _a[_i];
+ res[blobTag.key] = blobTag.value;
+ }
+ return res;
+}
+/**
+ * Convert BlobQueryTextConfiguration to QuerySerialization type.
+ *
+ * @export
+ * @param {(BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration)} [textConfiguration]
+ * @returns {(QuerySerialization | undefined)}
+ */
+function toQuerySerialization(textConfiguration) {
+ if (textConfiguration === undefined) {
+ return undefined;
+ }
+ switch (textConfiguration.kind) {
+ case "csv":
+ return {
+ format: {
+ type: "delimited",
+ delimitedTextConfiguration: {
+ columnSeparator: textConfiguration.columnSeparator || ",",
+ fieldQuote: textConfiguration.fieldQuote || "",
+ recordSeparator: textConfiguration.recordSeparator,
+ escapeChar: textConfiguration.escapeCharacter || "",
+ headersPresent: textConfiguration.hasHeaders || false
+ }
+ }
+ };
+ case "json":
+ return {
+ format: {
+ type: "json",
+ jsonTextConfiguration: {
+ recordSeparator: textConfiguration.recordSeparator
+ }
+ }
+ };
+ default:
+ throw Error("Invalid BlobQueryTextConfiguration.");
+ }
+}
+function parseObjectReplicationRecord(objectReplicationRecord) {
+ if (!objectReplicationRecord) {
+ return undefined;
+ }
+ if ("policy-id" in objectReplicationRecord) {
+ // If the dictionary contains a key with policy id, we are not required to do any parsing since
+ // the policy id should already be stored in the ObjectReplicationDestinationPolicyId.
+ return undefined;
+ }
+ var orProperties = [];
+ var _loop_1 = function (key) {
+ var ids = key.split("_");
+ var policyPrefix = "or-";
+ if (ids[0].startsWith(policyPrefix)) {
+ ids[0] = ids[0].substring(policyPrefix.length);
+ }
+ var rule = {
+ ruleId: ids[1],
+ replicationStatus: objectReplicationRecord[key]
+ };
+ var policyIndex = orProperties.findIndex(function (policy) { return policy.policyId === ids[0]; });
+ if (policyIndex > -1) {
+ orProperties[policyIndex].rules.push(rule);
+ }
+ else {
+ orProperties.push({
+ policyId: ids[0],
+ rules: [rule]
+ });
+ }
+ };
+ for (var key in objectReplicationRecord) {
+ _loop_1(key);
+ }
+ return orProperties;
}
-// Copyright (c) Microsoft Corporation.
-var AvroParser = /** @class */ (function () {
- function AvroParser() {
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including:
+ *
+ * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'.
+ * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL
+ * thus avoid the browser cache.
+ *
+ * 2. Remove cookie header for security
+ *
+ * 3. Remove content-length header to avoid browsers warning
+ *
+ * @class StorageBrowserPolicy
+ * @extends {BaseRequestPolicy}
+ */
+var StorageBrowserPolicy = /** @class */ (function (_super) {
+ tslib.__extends(StorageBrowserPolicy, _super);
+ /**
+ * Creates an instance of StorageBrowserPolicy.
+ * @param {RequestPolicy} nextPolicy
+ * @param {RequestPolicyOptions} options
+ * @memberof StorageBrowserPolicy
+ */
+ function StorageBrowserPolicy(nextPolicy, options) {
+ return _super.call(this, nextPolicy, options) || this;
}
/**
- * Reads a fixed number of bytes from the stream.
+ * Sends out request.
*
- * @static
- * @param {AvroReadable} [stream]
- * @param {number} [length]
- * @param {AvroParserReadOptions} [options={}]
- * @returns {Promise}
- * @memberof AvroParser
+ * @param {WebResource} request
+ * @returns {Promise}
+ * @memberof StorageBrowserPolicy
*/
- AvroParser.readFixedBytes = function (stream, length, options) {
- if (options === void 0) { options = {}; }
+ StorageBrowserPolicy.prototype.sendRequest = function (request) {
return tslib.__awaiter(this, void 0, void 0, function () {
- var bytes;
return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, stream.read(length, { abortSignal: options.abortSignal })];
- case 1:
- bytes = _a.sent();
- if (bytes.length != length) {
- throw new Error("Hit stream end.");
- }
- return [2 /*return*/, bytes];
+ {
+ return [2 /*return*/, this._nextPolicy.sendRequest(request)];
}
});
});
};
+ return StorageBrowserPolicy;
+}(coreHttp.BaseRequestPolicy));
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects.
+ *
+ * @export
+ * @class StorageBrowserPolicyFactory
+ * @implements {RequestPolicyFactory}
+ */
+var StorageBrowserPolicyFactory = /** @class */ (function () {
+ function StorageBrowserPolicyFactory() {
+ }
/**
- * Reads a single byte from the stream.
+ * Creates a StorageBrowserPolicyFactory object.
*
- * @static
- * @param {AvroReadable} [stream]
- * @param {AvroParserReadOptions} [options={}]
- * @returns {Promise}
- * @memberof AvroParser
+ * @param {RequestPolicy} nextPolicy
+ * @param {RequestPolicyOptions} options
+ * @returns {StorageBrowserPolicy}
+ * @memberof StorageBrowserPolicyFactory
*/
- AvroParser.readByte = function (stream, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var buf;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 1, options)];
- case 1:
- buf = _a.sent();
- return [2 /*return*/, buf[0]];
- }
- });
- });
- };
- // int and long are stored in variable-length zig-zag coding.
- // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt
- // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types
- AvroParser.readZigZagLong = function (stream, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var zigZagEncoded, significanceInBit, byte, haveMoreByte, significanceInFloat, res;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- zigZagEncoded = 0;
- significanceInBit = 0;
- _a.label = 1;
- case 1: return [4 /*yield*/, AvroParser.readByte(stream, options)];
- case 2:
- byte = _a.sent();
- haveMoreByte = byte & 0x80;
- zigZagEncoded |= (byte & 0x7f) << significanceInBit;
- significanceInBit += 7;
- _a.label = 3;
- case 3:
- if (haveMoreByte && significanceInBit < 28) return [3 /*break*/, 1];
- _a.label = 4;
- case 4:
- if (!haveMoreByte) return [3 /*break*/, 9];
- // Switch to float arithmetic
- zigZagEncoded = zigZagEncoded;
- significanceInFloat = 268435456; // 2 ** 28.
- _a.label = 5;
- case 5: return [4 /*yield*/, AvroParser.readByte(stream, options)];
- case 6:
- byte = _a.sent();
- zigZagEncoded += (byte & 0x7f) * significanceInFloat;
- significanceInFloat *= 128; // 2 ** 7
- _a.label = 7;
- case 7:
- if (byte & 0x80) return [3 /*break*/, 5];
- _a.label = 8;
- case 8:
- res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2;
- if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) {
- throw new Error("Integer overflow.");
- }
- return [2 /*return*/, res];
- case 9: return [2 /*return*/, (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1)];
- }
- });
- });
- };
- AvroParser.readLong = function (stream, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- return tslib.__generator(this, function (_a) {
- return [2 /*return*/, AvroParser.readZigZagLong(stream, options)];
- });
- });
- };
- AvroParser.readInt = function (stream, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- return tslib.__generator(this, function (_a) {
- return [2 /*return*/, AvroParser.readZigZagLong(stream, options)];
- });
- });
+ StorageBrowserPolicyFactory.prototype.create = function (nextPolicy, options) {
+ return new StorageBrowserPolicy(nextPolicy, options);
};
- AvroParser.readNull = function () {
+ return StorageBrowserPolicyFactory;
+}());
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+(function (StorageRetryPolicyType) {
+ /**
+ * Exponential retry. Retry time delay grows exponentially.
+ */
+ StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL";
+ /**
+ * Linear retry. Retry time delay grows linearly.
+ */
+ StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED";
+})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {}));
+// Default values of StorageRetryOptions
+var DEFAULT_RETRY_OPTIONS = {
+ maxRetryDelayInMs: 120 * 1000,
+ maxTries: 4,
+ retryDelayInMs: 4 * 1000,
+ retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL,
+ secondaryHost: "",
+ tryTimeoutInMs: undefined // Use server side default timeout strategy
+};
+var RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted.");
+/**
+ * Retry policy with exponential retry and linear retry implemented.
+ *
+ * @class RetryPolicy
+ * @extends {BaseRequestPolicy}
+ */
+var StorageRetryPolicy = /** @class */ (function (_super) {
+ tslib.__extends(StorageRetryPolicy, _super);
+ /**
+ * Creates an instance of RetryPolicy.
+ *
+ * @param {RequestPolicy} nextPolicy
+ * @param {RequestPolicyOptions} options
+ * @param {StorageRetryOptions} [retryOptions=DEFAULT_RETRY_OPTIONS]
+ * @memberof StorageRetryPolicy
+ */
+ function StorageRetryPolicy(nextPolicy, options, retryOptions) {
+ if (retryOptions === void 0) { retryOptions = DEFAULT_RETRY_OPTIONS; }
+ var _this = _super.call(this, nextPolicy, options) || this;
+ // Initialize retry options
+ _this.retryOptions = {
+ retryPolicyType: retryOptions.retryPolicyType
+ ? retryOptions.retryPolicyType
+ : DEFAULT_RETRY_OPTIONS.retryPolicyType,
+ maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1
+ ? Math.floor(retryOptions.maxTries)
+ : DEFAULT_RETRY_OPTIONS.maxTries,
+ tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0
+ ? retryOptions.tryTimeoutInMs
+ : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs,
+ retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0
+ ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs
+ ? retryOptions.maxRetryDelayInMs
+ : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs)
+ : DEFAULT_RETRY_OPTIONS.retryDelayInMs,
+ maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0
+ ? retryOptions.maxRetryDelayInMs
+ : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs,
+ secondaryHost: retryOptions.secondaryHost
+ ? retryOptions.secondaryHost
+ : DEFAULT_RETRY_OPTIONS.secondaryHost
+ };
+ return _this;
+ }
+ /**
+ * Sends request.
+ *
+ * @param {WebResource} request
+ * @returns {Promise}
+ * @memberof StorageRetryPolicy
+ */
+ StorageRetryPolicy.prototype.sendRequest = function (request) {
return tslib.__awaiter(this, void 0, void 0, function () {
return tslib.__generator(this, function (_a) {
- return [2 /*return*/, null];
+ return [2 /*return*/, this.attemptSendRequest(request, false, 1)];
});
});
};
- AvroParser.readBoolean = function (stream, options) {
- if (options === void 0) { options = {}; }
+ /**
+ * Decide and perform next retry. Won't mutate request parameter.
+ *
+ * @protected
+ * @param {WebResource} request
+ * @param {boolean} secondaryHas404 If attempt was against the secondary & it returned a StatusNotFound (404), then
+ * the resource was not found. This may be due to replication delay. So, in this
+ * case, we'll never try the secondary again for this operation.
+ * @param {number} attempt How many retries has been attempted to performed, starting from 1, which includes
+ * the attempt will be performed by this method call.
+ * @returns {Promise}
+ * @memberof StorageRetryPolicy
+ */
+ StorageRetryPolicy.prototype.attemptSendRequest = function (request, secondaryHas404, attempt) {
return tslib.__awaiter(this, void 0, void 0, function () {
- var b;
+ var newRequest, isPrimaryRetry, response, err_1;
return tslib.__generator(this, function (_a) {
switch (_a.label) {
- case 0: return [4 /*yield*/, AvroParser.readByte(stream, options)];
- case 1:
- b = _a.sent();
- if (b == 1) {
- return [2 /*return*/, true];
- }
- else if (b == 0) {
- return [2 /*return*/, false];
+ case 0:
+ newRequest = request.clone();
+ isPrimaryRetry = secondaryHas404 ||
+ !this.retryOptions.secondaryHost ||
+ !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") ||
+ attempt % 2 === 1;
+ if (!isPrimaryRetry) {
+ newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost);
}
- else {
- throw new Error("Byte was not a boolean.");
+ // Set the server-side timeout query parameter "timeout=[seconds]"
+ if (this.retryOptions.tryTimeoutInMs) {
+ newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString());
}
- }
- });
- });
- };
- AvroParser.readFloat = function (stream, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var u8arr, view;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 4, options)];
- case 1:
- u8arr = _a.sent();
- view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);
- return [2 /*return*/, view.getFloat32(0, true)]; // littleEndian = true
- }
- });
- });
- };
- AvroParser.readDouble = function (stream, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var u8arr, view;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 8, options)];
- case 1:
- u8arr = _a.sent();
- view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);
- return [2 /*return*/, view.getFloat64(0, true)]; // littleEndian = true
- }
- });
- });
- };
- AvroParser.readBytes = function (stream, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var size;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, AvroParser.readLong(stream, options)];
+ _a.label = 1;
case 1:
- size = _a.sent();
- if (size < 0) {
- throw new Error("Bytes size was negative.");
+ _a.trys.push([1, 3, , 4]);
+ logger.info("RetryPolicy: =====> Try=" + attempt + " " + (isPrimaryRetry ? "Primary" : "Secondary"));
+ return [4 /*yield*/, this._nextPolicy.sendRequest(newRequest)];
+ case 2:
+ response = _a.sent();
+ if (!this.shouldRetry(isPrimaryRetry, attempt, response)) {
+ return [2 /*return*/, response];
}
- return [4 /*yield*/, stream.read(size, { abortSignal: options.abortSignal })];
- case 2: return [2 /*return*/, _a.sent()];
- }
- });
- });
- };
- AvroParser.readString = function (stream, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var u8arr, utf8decoder;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, AvroParser.readBytes(stream, options)];
- case 1:
- u8arr = _a.sent();
- // polyfill TextDecoder to be backward compatible with older
- // nodejs that doesn't expose TextDecoder as a global variable
- if (typeof TextDecoder === "undefined" && "function" !== "undefined") {
- global.TextDecoder = __webpack_require__(669).TextDecoder;
+ secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404);
+ return [3 /*break*/, 4];
+ case 3:
+ err_1 = _a.sent();
+ logger.error("RetryPolicy: Caught error, message: " + err_1.message + ", code: " + err_1.code);
+ if (!this.shouldRetry(isPrimaryRetry, attempt, response, err_1)) {
+ throw err_1;
}
- utf8decoder = new TextDecoder();
- return [2 /*return*/, utf8decoder.decode(u8arr)];
+ return [3 /*break*/, 4];
+ case 4: return [4 /*yield*/, this.delay(isPrimaryRetry, attempt, request.abortSignal)];
+ case 5:
+ _a.sent();
+ return [4 /*yield*/, this.attemptSendRequest(request, secondaryHas404, ++attempt)];
+ case 6: return [2 /*return*/, _a.sent()];
}
});
});
};
- AvroParser.readMapPair = function (stream, readItemMethod, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var key, value;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, AvroParser.readString(stream, options)];
- case 1:
- key = _a.sent();
- return [4 /*yield*/, readItemMethod(stream, options)];
- case 2:
- value = _a.sent();
- return [2 /*return*/, { key: key, value: value }];
+ /**
+ * Decide whether to retry according to last HTTP response and retry counters.
+ *
+ * @protected
+ * @param {boolean} isPrimaryRetry
+ * @param {number} attempt
+ * @param {HttpOperationResponse} [response]
+ * @param {RestError} [err]
+ * @returns {boolean}
+ * @memberof StorageRetryPolicy
+ */
+ StorageRetryPolicy.prototype.shouldRetry = function (isPrimaryRetry, attempt, response, err) {
+ if (attempt >= this.retryOptions.maxTries) {
+ logger.info("RetryPolicy: Attempt(s) " + attempt + " >= maxTries " + this.retryOptions
+ .maxTries + ", no further try.");
+ return false;
+ }
+ // Handle network failures, you may need to customize the list when you implement
+ // your own http client
+ var retriableErrors = [
+ "ETIMEDOUT",
+ "ESOCKETTIMEDOUT",
+ "ECONNREFUSED",
+ "ECONNRESET",
+ "ENOENT",
+ "ENOTFOUND",
+ "TIMEOUT",
+ "EPIPE",
+ "REQUEST_SEND_ERROR" // For default xhr based http client provided in ms-rest-js
+ ];
+ if (err) {
+ for (var _i = 0, retriableErrors_1 = retriableErrors; _i < retriableErrors_1.length; _i++) {
+ var retriableError = retriableErrors_1[_i];
+ if (err.name.toUpperCase().includes(retriableError) ||
+ err.message.toUpperCase().includes(retriableError) ||
+ (err.code &&
+ err.code
+ .toString()
+ .toUpperCase()
+ .includes(retriableError))) {
+ logger.info("RetryPolicy: Network error " + retriableError + " found, will retry.");
+ return true;
}
- });
- });
+ }
+ }
+ // If attempt was against the secondary & it returned a StatusNotFound (404), then
+ // the resource was not found. This may be due to replication delay. So, in this
+ // case, we'll never try the secondary again for this operation.
+ if (response || err) {
+ var statusCode = response ? response.status : err ? err.statusCode : 0;
+ if (!isPrimaryRetry && statusCode === 404) {
+ logger.info("RetryPolicy: Secondary access with 404, will retry.");
+ return true;
+ }
+ // Server internal error or server timeout
+ if (statusCode === 503 || statusCode === 500) {
+ logger.info("RetryPolicy: Will retry for status code " + statusCode + ".");
+ return true;
+ }
+ }
+ return false;
};
- AvroParser.readMap = function (stream, readItemMethod, options) {
- if (options === void 0) { options = {}; }
+ /**
+ * Delay a calculated time between retries.
+ *
+ * @private
+ * @param {boolean} isPrimaryRetry
+ * @param {number} attempt
+ * @param {AbortSignalLike} [abortSignal]
+ * @memberof StorageRetryPolicy
+ */
+ StorageRetryPolicy.prototype.delay = function (isPrimaryRetry, attempt, abortSignal) {
return tslib.__awaiter(this, void 0, void 0, function () {
- var readPairMethod, pairs, dict, _i, pairs_1, pair;
- var _this = this;
+ var delayTimeInMs;
return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- readPairMethod = function (stream, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(_this, void 0, void 0, function () {
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, AvroParser.readMapPair(stream, readItemMethod, options)];
- case 1: return [2 /*return*/, _a.sent()];
- }
- });
- });
- };
- return [4 /*yield*/, AvroParser.readArray(stream, readPairMethod, options)];
- case 1:
- pairs = _a.sent();
- dict = {};
- for (_i = 0, pairs_1 = pairs; _i < pairs_1.length; _i++) {
- pair = pairs_1[_i];
- dict[pair.key] = pair.value;
- }
- return [2 /*return*/, dict];
+ delayTimeInMs = 0;
+ if (isPrimaryRetry) {
+ switch (this.retryOptions.retryPolicyType) {
+ case exports.StorageRetryPolicyType.EXPONENTIAL:
+ delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs);
+ break;
+ case exports.StorageRetryPolicyType.FIXED:
+ delayTimeInMs = this.retryOptions.retryDelayInMs;
+ break;
+ }
}
- });
- });
- };
- AvroParser.readArray = function (stream, readItemMethod, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var items, count, item;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- items = [];
- return [4 /*yield*/, AvroParser.readLong(stream, options)];
- case 1:
- count = _a.sent();
- _a.label = 2;
- case 2:
- if (!(count != 0)) return [3 /*break*/, 8];
- if (!(count < 0)) return [3 /*break*/, 4];
- // Ignore block sizes
- return [4 /*yield*/, AvroParser.readLong(stream, options)];
- case 3:
- // Ignore block sizes
- _a.sent();
- count = -count;
- _a.label = 4;
- case 4:
- if (!count--) return [3 /*break*/, 6];
- return [4 /*yield*/, readItemMethod(stream, options)];
- case 5:
- item = _a.sent();
- items.push(item);
- return [3 /*break*/, 4];
- case 6: return [4 /*yield*/, AvroParser.readLong(stream, options)];
- case 7:
- count = _a.sent();
- return [3 /*break*/, 2];
- case 8: return [2 /*return*/, items];
+ else {
+ delayTimeInMs = Math.random() * 1000;
}
+ logger.info("RetryPolicy: Delay for " + delayTimeInMs + "ms");
+ return [2 /*return*/, delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR)];
});
});
};
- return AvroParser;
-}());
-var AvroComplex;
-(function (AvroComplex) {
- AvroComplex["RECORD"] = "record";
- AvroComplex["ENUM"] = "enum";
- AvroComplex["ARRAY"] = "array";
- AvroComplex["MAP"] = "map";
- AvroComplex["UNION"] = "union";
- AvroComplex["FIXED"] = "fixed";
-})(AvroComplex || (AvroComplex = {}));
-var AvroType = /** @class */ (function () {
- function AvroType() {
+ return StorageRetryPolicy;
+}(coreHttp.BaseRequestPolicy));
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects.
+ *
+ * @export
+ * @class StorageRetryPolicyFactory
+ * @implements {RequestPolicyFactory}
+ */
+var StorageRetryPolicyFactory = /** @class */ (function () {
+ /**
+ * Creates an instance of StorageRetryPolicyFactory.
+ * @param {StorageRetryOptions} [retryOptions]
+ * @memberof StorageRetryPolicyFactory
+ */
+ function StorageRetryPolicyFactory(retryOptions) {
+ this.retryOptions = retryOptions;
}
/**
- * Determines the AvroType from the Avro Schema.
+ * Creates a StorageRetryPolicy object.
+ *
+ * @param {RequestPolicy} nextPolicy
+ * @param {RequestPolicyOptions} options
+ * @returns {StorageRetryPolicy}
+ * @memberof StorageRetryPolicyFactory
*/
- AvroType.fromSchema = function (schema) {
- if (typeof schema == "string") {
- return AvroType.fromStringSchema(schema);
- }
- else if (Array.isArray(schema)) {
- return AvroType.fromArraySchema(schema);
- }
- else {
- return AvroType.fromObjectSchema(schema);
- }
- };
- AvroType.fromStringSchema = function (schema) {
- switch (schema) {
- case AvroPrimitive.NULL:
- case AvroPrimitive.BOOLEAN:
- case AvroPrimitive.INT:
- case AvroPrimitive.LONG:
- case AvroPrimitive.FLOAT:
- case AvroPrimitive.DOUBLE:
- case AvroPrimitive.BYTES:
- case AvroPrimitive.STRING:
- return new AvroPrimitiveType(schema);
- default:
- throw new Error("Unexpected Avro type " + schema);
- }
- };
- AvroType.fromArraySchema = function (schema) {
- return new AvroUnionType(schema.map(AvroType.fromSchema));
+ StorageRetryPolicyFactory.prototype.create = function (nextPolicy, options) {
+ return new StorageRetryPolicy(nextPolicy, options, this.retryOptions);
};
- AvroType.fromObjectSchema = function (schema) {
- var type = schema.type;
- // Primitives can be defined as strings or objects
- try {
- return AvroType.fromStringSchema(type);
- }
- catch (err) { }
- switch (type) {
- case AvroComplex.RECORD:
- if (schema.aliases) {
- throw new Error("aliases currently is not supported, schema: " + schema);
- }
- if (!schema.name) {
- throw new Error("Required attribute 'name' doesn't exist on schema: " + schema);
- }
- var fields = {};
- if (!schema.fields) {
- throw new Error("Required attribute 'fields' doesn't exist on schema: " + schema);
- }
- for (var _i = 0, _a = schema.fields; _i < _a.length; _i++) {
- var field = _a[_i];
- fields[field.name] = AvroType.fromSchema(field.type);
- }
- return new AvroRecordType(fields, schema.name);
- case AvroComplex.ENUM:
- if (schema.aliases) {
- throw new Error("aliases currently is not supported, schema: " + schema);
- }
- if (!schema.symbols) {
- throw new Error("Required attribute 'symbols' doesn't exist on schema: " + schema);
- }
- return new AvroEnumType(schema.symbols);
- case AvroComplex.MAP:
- if (!schema.values) {
- throw new Error("Required attribute 'values' doesn't exist on schema: " + schema);
- }
- return new AvroMapType(AvroType.fromSchema(schema.values));
- case AvroComplex.ARRAY: // Unused today
- case AvroComplex.FIXED: // Unused today
- default:
- throw new Error("Unexpected Avro type " + type + " in " + schema);
- }
- };
- return AvroType;
+ return StorageRetryPolicyFactory;
}());
-var AvroPrimitive;
-(function (AvroPrimitive) {
- AvroPrimitive["NULL"] = "null";
- AvroPrimitive["BOOLEAN"] = "boolean";
- AvroPrimitive["INT"] = "int";
- AvroPrimitive["LONG"] = "long";
- AvroPrimitive["FLOAT"] = "float";
- AvroPrimitive["DOUBLE"] = "double";
- AvroPrimitive["BYTES"] = "bytes";
- AvroPrimitive["STRING"] = "string";
-})(AvroPrimitive || (AvroPrimitive = {}));
-var AvroPrimitiveType = /** @class */ (function (_super) {
- tslib.__extends(AvroPrimitiveType, _super);
- function AvroPrimitiveType(primitive) {
- var _this = _super.call(this) || this;
- _this._primitive = primitive;
- return _this;
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * Credential policy used to sign HTTP(S) requests before sending. This is an
+ * abstract class.
+ *
+ * @export
+ * @abstract
+ * @class CredentialPolicy
+ * @extends {BaseRequestPolicy}
+ */
+var CredentialPolicy = /** @class */ (function (_super) {
+ tslib.__extends(CredentialPolicy, _super);
+ function CredentialPolicy() {
+ return _super !== null && _super.apply(this, arguments) || this;
}
- AvroPrimitiveType.prototype.read = function (stream, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _a;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
- case 0:
- _a = this._primitive;
- switch (_a) {
- case AvroPrimitive.NULL: return [3 /*break*/, 1];
- case AvroPrimitive.BOOLEAN: return [3 /*break*/, 3];
- case AvroPrimitive.INT: return [3 /*break*/, 5];
- case AvroPrimitive.LONG: return [3 /*break*/, 7];
- case AvroPrimitive.FLOAT: return [3 /*break*/, 9];
- case AvroPrimitive.DOUBLE: return [3 /*break*/, 11];
- case AvroPrimitive.BYTES: return [3 /*break*/, 13];
- case AvroPrimitive.STRING: return [3 /*break*/, 15];
- }
- return [3 /*break*/, 17];
- case 1: return [4 /*yield*/, AvroParser.readNull()];
- case 2: return [2 /*return*/, _b.sent()];
- case 3: return [4 /*yield*/, AvroParser.readBoolean(stream, options)];
- case 4: return [2 /*return*/, _b.sent()];
- case 5: return [4 /*yield*/, AvroParser.readInt(stream, options)];
- case 6: return [2 /*return*/, _b.sent()];
- case 7: return [4 /*yield*/, AvroParser.readLong(stream, options)];
- case 8: return [2 /*return*/, _b.sent()];
- case 9: return [4 /*yield*/, AvroParser.readFloat(stream, options)];
- case 10: return [2 /*return*/, _b.sent()];
- case 11: return [4 /*yield*/, AvroParser.readDouble(stream, options)];
- case 12: return [2 /*return*/, _b.sent()];
- case 13: return [4 /*yield*/, AvroParser.readBytes(stream, options)];
- case 14: return [2 /*return*/, _b.sent()];
- case 15: return [4 /*yield*/, AvroParser.readString(stream, options)];
- case 16: return [2 /*return*/, _b.sent()];
- case 17: throw new Error("Unknown Avro Primitive");
- }
- });
- });
+ /**
+ * Sends out request.
+ *
+ * @param {WebResource} request
+ * @returns {Promise}
+ * @memberof CredentialPolicy
+ */
+ CredentialPolicy.prototype.sendRequest = function (request) {
+ return this._nextPolicy.sendRequest(this.signRequest(request));
};
- return AvroPrimitiveType;
-}(AvroType));
-var AvroEnumType = /** @class */ (function (_super) {
- tslib.__extends(AvroEnumType, _super);
- function AvroEnumType(symbols) {
- var _this = _super.call(this) || this;
- _this._symbols = symbols;
- return _this;
+ /**
+ * Child classes must implement this method with request signing. This method
+ * will be executed in {@link sendRequest}.
+ *
+ * @protected
+ * @abstract
+ * @param {WebResource} request
+ * @returns {WebResource}
+ * @memberof CredentialPolicy
+ */
+ CredentialPolicy.prototype.signRequest = function (request) {
+ // Child classes must override this method with request signing. This method
+ // will be executed in sendRequest().
+ return request;
+ };
+ return CredentialPolicy;
+}(coreHttp.BaseRequestPolicy));
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources
+ * or for use with Shared Access Signatures (SAS).
+ *
+ * @export
+ * @class AnonymousCredentialPolicy
+ * @extends {CredentialPolicy}
+ */
+var AnonymousCredentialPolicy = /** @class */ (function (_super) {
+ tslib.__extends(AnonymousCredentialPolicy, _super);
+ /**
+ * Creates an instance of AnonymousCredentialPolicy.
+ * @param {RequestPolicy} nextPolicy
+ * @param {RequestPolicyOptions} options
+ * @memberof AnonymousCredentialPolicy
+ */
+ function AnonymousCredentialPolicy(nextPolicy, options) {
+ return _super.call(this, nextPolicy, options) || this;
}
- AvroEnumType.prototype.read = function (stream, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var value;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, AvroParser.readInt(stream, options)];
- case 1:
- value = _a.sent();
- return [2 /*return*/, this._symbols[value]];
- }
- });
- });
+ return AnonymousCredentialPolicy;
+}(CredentialPolicy));
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+/**
+ * Credential is an abstract class for Azure Storage HTTP requests signing. This
+ * class will host an credentialPolicyCreator factory which generates CredentialPolicy.
+ *
+ * @export
+ * @abstract
+ * @class Credential
+ */
+var Credential = /** @class */ (function () {
+ function Credential() {
+ }
+ /**
+ * Creates a RequestPolicy object.
+ *
+ * @param {RequestPolicy} _nextPolicy
+ * @param {RequestPolicyOptions} _options
+ * @returns {RequestPolicy}
+ * @memberof Credential
+ */
+ Credential.prototype.create = function (
+ // tslint:disable-next-line:variable-name
+ _nextPolicy,
+ // tslint:disable-next-line:variable-name
+ _options) {
+ throw new Error("Method should be implemented in children classes.");
};
- return AvroEnumType;
-}(AvroType));
-var AvroUnionType = /** @class */ (function (_super) {
- tslib.__extends(AvroUnionType, _super);
- function AvroUnionType(types) {
- var _this = _super.call(this) || this;
- _this._types = types;
- return _this;
+ return Credential;
+}());
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * AnonymousCredential provides a credentialPolicyCreator member used to create
+ * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with
+ * HTTP(S) requests that read public resources or for use with Shared Access
+ * Signatures (SAS).
+ *
+ * @export
+ * @class AnonymousCredential
+ * @extends {Credential}
+ */
+var AnonymousCredential = /** @class */ (function (_super) {
+ tslib.__extends(AnonymousCredential, _super);
+ function AnonymousCredential() {
+ return _super !== null && _super.apply(this, arguments) || this;
}
- AvroUnionType.prototype.read = function (stream, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var typeIndex;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, AvroParser.readInt(stream, options)];
- case 1:
- typeIndex = _a.sent();
- return [4 /*yield*/, this._types[typeIndex].read(stream, options)];
- case 2: return [2 /*return*/, _a.sent()];
- }
- });
- });
+ /**
+ * Creates an {@link AnonymousCredentialPolicy} object.
+ *
+ * @param {RequestPolicy} nextPolicy
+ * @param {RequestPolicyOptions} options
+ * @returns {AnonymousCredentialPolicy}
+ * @memberof AnonymousCredential
+ */
+ AnonymousCredential.prototype.create = function (nextPolicy, options) {
+ return new AnonymousCredentialPolicy(nextPolicy, options);
};
- return AvroUnionType;
-}(AvroType));
-var AvroMapType = /** @class */ (function (_super) {
- tslib.__extends(AvroMapType, _super);
- function AvroMapType(itemType) {
- var _this = _super.call(this) || this;
- _this._itemType = itemType;
+ return AnonymousCredential;
+}(Credential));
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * TelemetryPolicy is a policy used to tag user-agent header for every requests.
+ *
+ * @class TelemetryPolicy
+ * @extends {BaseRequestPolicy}
+ */
+var TelemetryPolicy = /** @class */ (function (_super) {
+ tslib.__extends(TelemetryPolicy, _super);
+ /**
+ * Creates an instance of TelemetryPolicy.
+ * @param {RequestPolicy} nextPolicy
+ * @param {RequestPolicyOptions} options
+ * @param {string} telemetry
+ * @memberof TelemetryPolicy
+ */
+ function TelemetryPolicy(nextPolicy, options, telemetry) {
+ var _this = _super.call(this, nextPolicy, options) || this;
+ _this.telemetry = telemetry;
return _this;
}
- AvroMapType.prototype.read = function (stream, options) {
- if (options === void 0) { options = {}; }
+ /**
+ * Sends out request.
+ *
+ * @param {WebResource} request
+ * @returns {Promise}
+ * @memberof TelemetryPolicy
+ */
+ TelemetryPolicy.prototype.sendRequest = function (request) {
return tslib.__awaiter(this, void 0, void 0, function () {
- var readItemMethod;
- var _this = this;
return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- readItemMethod = function (s, options) { return tslib.__awaiter(_this, void 0, void 0, function () {
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, this._itemType.read(s, options)];
- case 1: return [2 /*return*/, _a.sent()];
- }
- });
- }); };
- return [4 /*yield*/, AvroParser.readMap(stream, readItemMethod, options)];
- case 1: return [2 /*return*/, _a.sent()];
+ {
+ if (!request.headers) {
+ request.headers = new coreHttp.HttpHeaders();
+ }
+ if (!request.headers.get(HeaderConstants.USER_AGENT)) {
+ request.headers.set(HeaderConstants.USER_AGENT, this.telemetry);
+ }
}
+ return [2 /*return*/, this._nextPolicy.sendRequest(request)];
});
});
};
- return AvroMapType;
-}(AvroType));
-var AvroRecordType = /** @class */ (function (_super) {
- tslib.__extends(AvroRecordType, _super);
- function AvroRecordType(fields, name) {
- var _this = _super.call(this) || this;
- _this._fields = fields;
- _this._name = name;
- return _this;
- }
- AvroRecordType.prototype.read = function (stream, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var record, _a, _b, _i, key, _c, _d;
- return tslib.__generator(this, function (_e) {
- switch (_e.label) {
- case 0:
- record = {};
- record["$schema"] = this._name;
- _a = [];
- for (_b in this._fields)
- _a.push(_b);
- _i = 0;
- _e.label = 1;
- case 1:
- if (!(_i < _a.length)) return [3 /*break*/, 4];
- key = _a[_i];
- if (!this._fields.hasOwnProperty(key)) return [3 /*break*/, 3];
- _c = record;
- _d = key;
- return [4 /*yield*/, this._fields[key].read(stream, options)];
- case 2:
- _c[_d] = _e.sent();
- _e.label = 3;
- case 3:
- _i++;
- return [3 /*break*/, 1];
- case 4: return [2 /*return*/, record];
+ return TelemetryPolicy;
+}(coreHttp.BaseRequestPolicy));
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects.
+ *
+ * @export
+ * @class TelemetryPolicyFactory
+ * @implements {RequestPolicyFactory}
+ */
+var TelemetryPolicyFactory = /** @class */ (function () {
+ /**
+ * Creates an instance of TelemetryPolicyFactory.
+ * @param {UserAgentOptions} [telemetry]
+ * @memberof TelemetryPolicyFactory
+ */
+ function TelemetryPolicyFactory(telemetry) {
+ var userAgentInfo = [];
+ {
+ if (telemetry) {
+ // FIXME: replace() only replaces the first space. And we have no idea why we need to replace spaces in the first place.
+ // But fixing this would be a breaking change. Logged an issue here: https://github.com/Azure/azure-sdk-for-js/issues/10793
+ var telemetryString = (telemetry.userAgentPrefix || "").replace(" ", "");
+ if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) {
+ userAgentInfo.push(telemetryString);
}
- });
- });
+ }
+ // e.g. azsdk-js-storageblob/10.0.0
+ var libInfo = "azsdk-js-storageblob/" + SDK_VERSION;
+ if (userAgentInfo.indexOf(libInfo) === -1) {
+ userAgentInfo.push(libInfo);
+ }
+ // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299)
+ var runtimeInfo = "(NODE-VERSION " + process.version + "; " + os.type() + " " + os.release() + ")";
+ if (userAgentInfo.indexOf(runtimeInfo) === -1) {
+ userAgentInfo.push(runtimeInfo);
+ }
+ }
+ this.telemetryString = userAgentInfo.join(" ");
+ }
+ /**
+ * Creates a TelemetryPolicy object.
+ *
+ * @param {RequestPolicy} nextPolicy
+ * @param {RequestPolicyOptions} options
+ * @returns {TelemetryPolicy}
+ * @memberof TelemetryPolicyFactory
+ */
+ TelemetryPolicyFactory.prototype.create = function (nextPolicy, options) {
+ return new TelemetryPolicy(nextPolicy, options, this.telemetryString);
};
- return AvroRecordType;
-}(AvroType));
+ return TelemetryPolicyFactory;
+}());
// Copyright (c) Microsoft Corporation.
-var AvroReader = /** @class */ (function () {
- function AvroReader(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) {
- this._dataStream = dataStream;
- this._headerStream = headerStream || dataStream;
- this._initialized = false;
- this._blockOffset = currentBlockOffset || 0;
- this._objectIndex = indexWithinCurrentBlock || 0;
- this._initialBlockOffset = currentBlockOffset || 0;
- }
- Object.defineProperty(AvroReader.prototype, "blockOffset", {
- get: function () {
- return this._blockOffset;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(AvroReader.prototype, "objectIndex", {
- get: function () {
- return this._objectIndex;
- },
- enumerable: false,
- configurable: true
- });
- AvroReader.prototype.initialize = function (options) {
+var _defaultHttpClient = new coreHttp.DefaultHttpClient();
+function getCachedDefaultHttpClient() {
+ return _defaultHttpClient;
+}
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * A Pipeline class containing HTTP request policies.
+ * You can create a default Pipeline by calling {@link newPipeline}.
+ * Or you can create a Pipeline with your own policies by the constructor of Pipeline.
+ *
+ * Refer to {@link newPipeline} and provided policies before implementing your
+ * customized Pipeline.
+ *
+ * @export
+ * @class Pipeline
+ */
+var Pipeline = /** @class */ (function () {
+ /**
+ * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface.
+ *
+ * @param {RequestPolicyFactory[]} factories
+ * @param {PipelineOptions} [options={}]
+ * @memberof Pipeline
+ */
+ function Pipeline(factories, options) {
if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var header, _a, codec, _b, schema, _c, i;
- return tslib.__generator(this, function (_d) {
- switch (_d.label) {
- case 0: return [4 /*yield*/, AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, {
- abortSignal: options.abortSignal
- })];
- case 1:
- header = _d.sent();
- if (!arraysEqual(header, AVRO_INIT_BYTES)) {
- throw new Error("Stream is not an Avro file.");
- }
- // File metadata is written as if defined by the following map schema:
- // { "type": "map", "values": "bytes"}
- _a = this;
- return [4 /*yield*/, AvroParser.readMap(this._headerStream, AvroParser.readString, {
- abortSignal: options.abortSignal
- })];
- case 2:
- // File metadata is written as if defined by the following map schema:
- // { "type": "map", "values": "bytes"}
- _a._metadata = _d.sent();
- codec = this._metadata[AVRO_CODEC_KEY];
- if (!(codec == undefined || codec == "null")) {
- throw new Error("Codecs are not supported");
- }
- // The 16-byte, randomly-generated sync marker for this file.
- _b = this;
- return [4 /*yield*/, AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, {
- abortSignal: options.abortSignal
- })];
- case 3:
- // The 16-byte, randomly-generated sync marker for this file.
- _b._syncMarker = _d.sent();
- schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]);
- this._itemType = AvroType.fromSchema(schema);
- if (this._blockOffset == 0) {
- this._blockOffset = this._initialBlockOffset + this._dataStream.position;
- }
- _c = this;
- return [4 /*yield*/, AvroParser.readLong(this._dataStream, {
- abortSignal: options.abortSignal
- })];
- case 4:
- _c._itemsRemainingInBlock = _d.sent();
- // skip block length
- return [4 /*yield*/, AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })];
- case 5:
- // skip block length
- _d.sent();
- this._initialized = true;
- if (!(this._objectIndex && this._objectIndex > 0)) return [3 /*break*/, 9];
- i = 0;
- _d.label = 6;
- case 6:
- if (!(i < this._objectIndex)) return [3 /*break*/, 9];
- return [4 /*yield*/, this._itemType.read(this._dataStream, { abortSignal: options.abortSignal })];
- case 7:
- _d.sent();
- this._itemsRemainingInBlock--;
- _d.label = 8;
- case 8:
- i++;
- return [3 /*break*/, 6];
- case 9: return [2 /*return*/];
- }
- });
- });
- };
- AvroReader.prototype.hasNext = function () {
- return !this._initialized || this._itemsRemainingInBlock > 0;
- };
- AvroReader.prototype.parseObjects = function (options) {
- if (options === void 0) { options = {}; }
- return tslib.__asyncGenerator(this, arguments, function parseObjects_1() {
- var result, marker, _a, err_1;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
- case 0:
- if (!!this._initialized) return [3 /*break*/, 2];
- return [4 /*yield*/, tslib.__await(this.initialize(options))];
- case 1:
- _b.sent();
- _b.label = 2;
- case 2:
- if (!this.hasNext()) return [3 /*break*/, 13];
- return [4 /*yield*/, tslib.__await(this._itemType.read(this._dataStream, {
- abortSignal: options.abortSignal
- }))];
- case 3:
- result = _b.sent();
- this._itemsRemainingInBlock--;
- this._objectIndex++;
- if (!(this._itemsRemainingInBlock == 0)) return [3 /*break*/, 10];
- return [4 /*yield*/, tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, {
- abortSignal: options.abortSignal
- }))];
- case 4:
- marker = _b.sent();
- this._blockOffset = this._initialBlockOffset + this._dataStream.position;
- this._objectIndex = 0;
- if (!arraysEqual(this._syncMarker, marker)) {
- throw new Error("Stream is not a valid Avro file.");
- }
- _b.label = 5;
- case 5:
- _b.trys.push([5, 7, , 8]);
- _a = this;
- return [4 /*yield*/, tslib.__await(AvroParser.readLong(this._dataStream, {
- abortSignal: options.abortSignal
- }))];
- case 6:
- _a._itemsRemainingInBlock = _b.sent();
- return [3 /*break*/, 8];
- case 7:
- err_1 = _b.sent();
- // We hit the end of the stream.
- this._itemsRemainingInBlock = 0;
- return [3 /*break*/, 8];
- case 8:
- if (!(this._itemsRemainingInBlock > 0)) return [3 /*break*/, 10];
- // Ignore block size
- return [4 /*yield*/, tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }))];
- case 9:
- // Ignore block size
- _b.sent();
- _b.label = 10;
- case 10: return [4 /*yield*/, tslib.__await(result)];
- case 11: return [4 /*yield*/, _b.sent()];
- case 12:
- _b.sent();
- return [3 /*break*/, 2];
- case 13: return [2 /*return*/];
- }
- });
- });
+ this.factories = factories;
+ // when options.httpClient is not specified, passing in a DefaultHttpClient instance to
+ // avoid each client creating its own http client.
+ this.options = tslib.__assign(tslib.__assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() });
+ }
+ /**
+ * Transfer Pipeline object to ServiceClientOptions object which is required by
+ * ServiceClient constructor.
+ *
+ * @returns {ServiceClientOptions} The ServiceClientOptions object from this Pipeline.
+ * @memberof Pipeline
+ */
+ Pipeline.prototype.toServiceClientOptions = function () {
+ return {
+ httpClient: this.options.httpClient,
+ requestPolicyFactories: this.factories
+ };
};
- return AvroReader;
+ return Pipeline;
}());
-
-// Copyright (c) Microsoft Corporation.
-// Licensed under the MIT license.
-var AvroReadable = /** @class */ (function () {
- function AvroReadable() {
+/**
+ * Creates a new Pipeline object with Credential provided.
+ *
+ * @export
+ * @param {StorageSharedKeyCredential | AnonymousCredential | TokenCredential} credential Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the @azure/identity package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.
+ * @param {StoragePipelineOptions} [pipelineOptions] Optional. Options.
+ * @returns {Pipeline} A new Pipeline object.
+ */
+function newPipeline(credential, pipelineOptions) {
+ if (pipelineOptions === void 0) { pipelineOptions = {}; }
+ if (credential === undefined) {
+ credential = new AnonymousCredential();
}
- return AvroReadable;
-}());
-
-// Copyright (c) Microsoft Corporation.
-var ABORT_ERROR$1 = new abortController.AbortError("Reading from the avro stream was aborted.");
-var AvroReadableFromStream = /** @class */ (function (_super) {
- tslib.__extends(AvroReadableFromStream, _super);
- function AvroReadableFromStream(readable) {
- var _this = _super.call(this) || this;
- _this._readable = readable;
- _this._position = 0;
- return _this;
+ // Order is important. Closer to the API at the top & closer to the network at the bottom.
+ // The credential's policy factory must appear close to the wire so it can sign any
+ // changes made by other factories (like UniqueRequestIDPolicyFactory)
+ var telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions);
+ var factories = [
+ coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }),
+ coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions),
+ telemetryPolicy,
+ coreHttp.generateClientRequestIdPolicy(),
+ new StorageBrowserPolicyFactory(),
+ coreHttp.deserializationPolicy(),
+ new StorageRetryPolicyFactory(pipelineOptions.retryOptions),
+ coreHttp.logPolicy({
+ logger: logger.info,
+ allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames,
+ allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters
+ })
+ ];
+ {
+ // policies only available in Node.js runtime, not in browsers
+ factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions));
+ factories.push(coreHttp.disableResponseDecompressionPolicy());
}
- AvroReadableFromStream.prototype.toUint8Array = function (data) {
- if (typeof data === "string") {
- return Buffer.from(data);
- }
- return data;
- };
- Object.defineProperty(AvroReadableFromStream.prototype, "position", {
- get: function () {
- return this._position;
- },
- enumerable: false,
- configurable: true
- });
- AvroReadableFromStream.prototype.read = function (size, options) {
- var _a;
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var chunk;
- var _this = this;
- return tslib.__generator(this, function (_b) {
- if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) {
- throw ABORT_ERROR$1;
- }
- if (size < 0) {
- throw new Error("size parameter should be positive: " + size);
- }
- if (size === 0) {
- return [2 /*return*/, new Uint8Array()];
- }
- if (!this._readable.readable) {
- throw new Error("Stream no longer readable.");
- }
- chunk = this._readable.read(size);
- if (chunk) {
- this._position += chunk.length;
- // chunk.length maybe less than desired size if the stream ends.
- return [2 /*return*/, this.toUint8Array(chunk)];
- }
- else {
- // register callback to wait for enough data to read
- return [2 /*return*/, new Promise(function (resolve, reject) {
- var cleanUp = function () {
- _this._readable.removeListener("readable", readableCallback);
- _this._readable.removeListener("error", rejectCallback);
- _this._readable.removeListener("end", rejectCallback);
- _this._readable.removeListener("close", rejectCallback);
- if (options.abortSignal) {
- options.abortSignal.removeEventListener("abort", abortHandler);
- }
- };
- var readableCallback = function () {
- var chunk = _this._readable.read(size);
- if (chunk) {
- _this._position += chunk.length;
- cleanUp();
- // chunk.length maybe less than desired size if the stream ends.
- resolve(_this.toUint8Array(chunk));
- }
- };
- var rejectCallback = function () {
- cleanUp();
- reject();
- };
- var abortHandler = function () {
- cleanUp();
- reject(ABORT_ERROR$1);
- };
- _this._readable.on("readable", readableCallback);
- _this._readable.once("error", rejectCallback);
- _this._readable.once("end", rejectCallback);
- _this._readable.once("close", rejectCallback);
- if (options.abortSignal) {
- options.abortSignal.addEventListener("abort", abortHandler);
- }
- })];
- }
- });
- });
- };
- return AvroReadableFromStream;
-}(AvroReadable));
+ factories.push(coreHttp.isTokenCredential(credential)
+ ? coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes)
+ : credential);
+ return new Pipeline(factories, pipelineOptions);
+}
// Copyright (c) Microsoft Corporation. All rights reserved.
+var ABORT_ERROR = new abortController.AbortError("The operation was aborted.");
/**
* ONLY AVAILABLE IN NODE.JS RUNTIME.
*
- * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query.
+ * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends.
*
- * @class BlobQuickQueryStream
+ * @class RetriableReadableStream
* @extends {Readable}
*/
-var BlobQuickQueryStream = /** @class */ (function (_super) {
- tslib.__extends(BlobQuickQueryStream, _super);
+var RetriableReadableStream = /** @class */ (function (_super) {
+ tslib.__extends(RetriableReadableStream, _super);
/**
- * Creates an instance of BlobQuickQueryStream.
+ * Creates an instance of RetriableReadableStream.
*
* @param {NodeJS.ReadableStream} source The current ReadableStream returned from getter
- * @param {BlobQuickQueryStreamOptions} [options={}]
- * @memberof BlobQuickQueryStream
+ * @param {ReadableStreamGetter} getter A method calling downloading request returning
+ * a new ReadableStream from specified offset
+ * @param {number} offset Offset position in original data source to read
+ * @param {number} count How much data in original data source to read
+ * @param {RetriableReadableStreamOptions} [options={}]
+ * @memberof RetriableReadableStream
*/
- function BlobQuickQueryStream(source, options) {
+ function RetriableReadableStream(source, getter, offset, count, options) {
if (options === void 0) { options = {}; }
var _this = _super.call(this) || this;
+ _this.retries = 0;
+ _this.abortHandler = function () {
+ _this.source.pause();
+ _this.emit("error", ABORT_ERROR);
+ };
+ _this.aborter = options.abortSignal || abortController.AbortSignal.none;
+ _this.getter = getter;
_this.source = source;
+ _this.start = offset;
+ _this.offset = offset;
+ _this.end = offset + count - 1;
+ _this.maxRetryRequests =
+ options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0;
_this.onProgress = options.onProgress;
- _this.onError = options.onError;
- _this.avroReader = new AvroReader(new AvroReadableFromStream(_this.source));
- _this.avroIter = _this.avroReader.parseObjects({ abortSignal: options.abortSignal });
+ _this.options = options;
+ _this.aborter.addEventListener("abort", _this.abortHandler);
+ _this.setSourceDataHandler();
+ _this.setSourceEndHandler();
+ _this.setSourceErrorHandler();
return _this;
}
- BlobQuickQueryStream.prototype._read = function () {
+ RetriableReadableStream.prototype._read = function () {
+ if (!this.aborter.aborted) {
+ this.source.resume();
+ }
+ };
+ RetriableReadableStream.prototype.setSourceDataHandler = function () {
var _this = this;
- this.readInternal().catch(function (err) {
- _this.emit("error", err);
+ this.source.on("data", function (data) {
+ if (_this.options.doInjectErrorOnce) {
+ _this.options.doInjectErrorOnce = undefined;
+ _this.source.pause();
+ _this.source.removeAllListeners("data");
+ _this.source.emit("end");
+ return;
+ }
+ // console.log(
+ // `Offset: ${this.offset}, Received ${data.length} from internal stream`
+ // );
+ _this.offset += data.length;
+ if (_this.onProgress) {
+ _this.onProgress({ loadedBytes: _this.offset - _this.start });
+ }
+ if (!_this.push(data)) {
+ _this.source.pause();
+ }
});
};
- BlobQuickQueryStream.prototype.readInternal = function () {
- var e_1, _a;
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, _c, obj, schema, exit, data, bytesScanned, totalBytes, fatal, name_1, description, position, e_1_1;
- return tslib.__generator(this, function (_d) {
- switch (_d.label) {
- case 0:
- _d.trys.push([0, 5, 6, 11]);
- _b = tslib.__asyncValues(this.avroIter);
- _d.label = 1;
- case 1: return [4 /*yield*/, _b.next()];
- case 2:
- if (!(_c = _d.sent(), !_c.done)) return [3 /*break*/, 4];
- obj = _c.value;
- schema = obj.$schema;
- if (typeof schema !== "string") {
- throw Error("Missing schema in avro record.");
- }
- exit = false;
- switch (schema) {
- case "com.microsoft.azure.storage.queryBlobContents.resultData":
- data = obj.data;
- if (data instanceof Uint8Array === false) {
- throw Error("Invalid data in avro result record.");
- }
- if (!this.push(Buffer.from(data))) {
- exit = true;
- }
- break;
- case "com.microsoft.azure.storage.queryBlobContents.progress":
- bytesScanned = obj.bytesScanned;
- if (typeof bytesScanned !== "number") {
- throw Error("Invalid bytesScanned in avro progress record.");
- }
- if (this.onProgress) {
- this.onProgress({ loadedBytes: bytesScanned });
- }
- break;
- case "com.microsoft.azure.storage.queryBlobContents.end":
- if (this.onProgress) {
- totalBytes = obj.totalBytes;
- if (typeof totalBytes !== "number") {
- throw Error("Invalid totalBytes in avro end record.");
- }
- this.onProgress({ loadedBytes: totalBytes });
- }
- this.push(null);
- break;
- case "com.microsoft.azure.storage.queryBlobContents.error":
- if (this.onError) {
- fatal = obj.fatal;
- if (typeof fatal !== "boolean") {
- throw Error("Invalid fatal in avro error record.");
- }
- name_1 = obj.name;
- if (typeof name_1 !== "string") {
- throw Error("Invalid name in avro error record.");
- }
- description = obj.description;
- if (typeof description !== "string") {
- throw Error("Invalid description in avro error record.");
- }
- position = obj.position;
- if (typeof position !== "number") {
- throw Error("Invalid position in avro error record.");
- }
- this.onError({
- position: position,
- name: name_1,
- isFatal: fatal,
- description: description
- });
- }
- break;
- default:
- throw Error("Unknown schema " + schema + " in avro progress record.");
- }
- if (exit) {
- return [3 /*break*/, 4];
- }
- _d.label = 3;
- case 3: return [3 /*break*/, 1];
- case 4: return [3 /*break*/, 11];
- case 5:
- e_1_1 = _d.sent();
- e_1 = { error: e_1_1 };
- return [3 /*break*/, 11];
- case 6:
- _d.trys.push([6, , 9, 10]);
- if (!(_c && !_c.done && (_a = _b.return))) return [3 /*break*/, 8];
- return [4 /*yield*/, _a.call(_b)];
- case 7:
- _d.sent();
- _d.label = 8;
- case 8: return [3 /*break*/, 10];
- case 9:
- if (e_1) throw e_1.error;
- return [7 /*endfinally*/];
- case 10: return [7 /*endfinally*/];
- case 11: return [2 /*return*/];
+ RetriableReadableStream.prototype.setSourceEndHandler = function () {
+ var _this = this;
+ this.source.on("end", function () {
+ // console.log(
+ // `Source stream emits end, offset: ${
+ // this.offset
+ // }, dest end : ${this.end}`
+ // );
+ if (_this.offset - 1 === _this.end) {
+ _this.aborter.removeEventListener("abort", _this.abortHandler);
+ _this.push(null);
+ }
+ else if (_this.offset <= _this.end) {
+ // console.log(
+ // `retries: ${this.retries}, max retries: ${this.maxRetries}`
+ // );
+ if (_this.retries < _this.maxRetryRequests) {
+ _this.retries += 1;
+ _this.getter(_this.offset)
+ .then(function (newSource) {
+ _this.source = newSource;
+ _this.setSourceDataHandler();
+ _this.setSourceEndHandler();
+ _this.setSourceErrorHandler();
+ })
+ .catch(function (error) {
+ _this.emit("error", error);
+ });
}
- });
+ else {
+ _this.emit("error", new Error(
+ // tslint:disable-next-line:max-line-length
+ "Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: " + (_this
+ .offset - 1) + ", data needed offset: " + _this.end + ", retries: " + _this.retries + ", max retries: " + _this.maxRetryRequests));
+ }
+ }
+ else {
+ _this.emit("error", new Error("Data corruption failure: Received more data than original request, data needed offset is " + _this.end + ", received offset: " + (_this.offset - 1)));
+ }
});
};
- return BlobQuickQueryStream;
+ RetriableReadableStream.prototype.setSourceErrorHandler = function () {
+ var _this = this;
+ this.source.on("error", function (error) {
+ _this.emit("error", error);
+ });
+ };
+ return RetriableReadableStream;
}(stream.Readable));
// Copyright (c) Microsoft Corporation. All rights reserved.
/**
* ONLY AVAILABLE IN NODE.JS RUNTIME.
*
- * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will
- * parse avor data returned by blob query.
+ * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will
+ * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot
+ * trigger retries defined in pipeline retry policy.)
+ *
+ * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js
+ * Readable stream.
*
* @export
- * @class BlobQueryResponse
- * @implements {BlobDownloadResponseModel}
+ * @class BlobDownloadResponse
+ * @implements {BlobDownloadResponseParsed}
*/
-var BlobQueryResponse = /** @class */ (function () {
+var BlobDownloadResponse = /** @class */ (function () {
/**
- * Creates an instance of BlobQueryResponse.
+ * Creates an instance of BlobDownloadResponse.
*
- * @param {BlobQueryResponseModel} originalResponse
- * @param {BlobQuickQueryStreamOptions} [options={}]
- * @memberof BlobQueryResponse
+ * @param {BlobDownloadResponseParsed} originalResponse
+ * @param {ReadableStreamGetter} getter
+ * @param {number} offset
+ * @param {number} count
+ * @param {RetriableReadableStreamOptions} [options={}]
+ * @memberof BlobDownloadResponse
*/
- function BlobQueryResponse(originalResponse, options) {
+ function BlobDownloadResponse(originalResponse, getter, offset, count, options) {
if (options === void 0) { options = {}; }
this.originalResponse = originalResponse;
- this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options);
+ this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options);
}
- Object.defineProperty(BlobQueryResponse.prototype, "acceptRanges", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "acceptRanges", {
/**
* Indicates that the service supports
* requests for partial file content.
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.acceptRanges;
@@ -45464,14 +45393,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "cacheControl", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "cacheControl", {
/**
* Returns if it was previously specified
* for the file.
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.cacheControl;
@@ -45479,7 +45408,7 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "contentDisposition", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "contentDisposition", {
/**
* Returns the value that was specified
* for the 'x-ms-content-disposition' header and specifies how to process the
@@ -45487,7 +45416,7 @@ var BlobQueryResponse = /** @class */ (function () {
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.contentDisposition;
@@ -45495,14 +45424,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "contentEncoding", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "contentEncoding", {
/**
* Returns the value that was specified
* for the Content-Encoding request header.
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.contentEncoding;
@@ -45510,14 +45439,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "contentLanguage", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "contentLanguage", {
/**
* Returns the value that was specified
* for the Content-Language request header.
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.contentLanguage;
@@ -45525,14 +45454,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "blobSequenceNumber", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "blobSequenceNumber", {
/**
* The current sequence number for a
* page blob. This header is not returned for block blobs or append blobs.
*
* @readonly
* @type {(number | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.blobSequenceNumber;
@@ -45540,14 +45469,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "blobType", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "blobType", {
/**
* The blob's type. Possible values include:
* 'BlockBlob', 'PageBlob', 'AppendBlob'.
*
* @readonly
* @type {(BlobType | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.blobType;
@@ -45555,14 +45484,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "contentLength", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "contentLength", {
/**
* The number of bytes present in the
* response body.
*
* @readonly
* @type {(number | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.contentLength;
@@ -45570,7 +45499,7 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "contentMD5", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "contentMD5", {
/**
* If the file has an MD5 hash and the
* request is to read the full file, this response header is returned so that
@@ -45583,7 +45512,7 @@ var BlobQueryResponse = /** @class */ (function () {
*
* @readonly
* @type {(Uint8Array | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.contentMD5;
@@ -45591,7 +45520,7 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "contentRange", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "contentRange", {
/**
* Indicates the range of bytes returned if
* the client requested a subset of the file by setting the Range request
@@ -45599,7 +45528,7 @@ var BlobQueryResponse = /** @class */ (function () {
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.contentRange;
@@ -45607,14 +45536,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "contentType", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "contentType", {
/**
* The content type specified for the file.
* The default content type is 'application/octet-stream'
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.contentType;
@@ -45622,7 +45551,7 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "copyCompletedOn", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "copyCompletedOn", {
/**
* Conclusion time of the last attempted
* Copy File operation where this file was the destination file. This value
@@ -45630,22 +45559,22 @@ var BlobQueryResponse = /** @class */ (function () {
*
* @readonly
* @type {(Date | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
- return undefined;
+ return this.originalResponse.copyCompletedOn;
},
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "copyId", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "copyId", {
/**
* String identifier for the last attempted Copy
* File operation where this file was the destination file.
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.copyId;
@@ -45653,7 +45582,7 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "copyProgress", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "copyProgress", {
/**
* Contains the number of bytes copied and
* the total bytes in the source in the last attempted Copy File operation
@@ -45662,7 +45591,7 @@ var BlobQueryResponse = /** @class */ (function () {
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.copyProgress;
@@ -45670,7 +45599,7 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "copySource", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "copySource", {
/**
* URL up to 2KB in length that specifies the
* source file used in the last attempted Copy File operation where this file
@@ -45678,7 +45607,7 @@ var BlobQueryResponse = /** @class */ (function () {
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.copySource;
@@ -45686,7 +45615,7 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "copyStatus", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "copyStatus", {
/**
* State of the copy operation
* identified by 'x-ms-copy-id'. Possible values include: 'pending',
@@ -45694,7 +45623,7 @@ var BlobQueryResponse = /** @class */ (function () {
*
* @readonly
* @type {(CopyStatusType | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.copyStatus;
@@ -45702,7 +45631,7 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "copyStatusDescription", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "copyStatusDescription", {
/**
* Only appears when
* x-ms-copy-status is failed or pending. Describes cause of fatal or
@@ -45710,7 +45639,7 @@ var BlobQueryResponse = /** @class */ (function () {
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.copyStatusDescription;
@@ -45718,7 +45647,7 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "leaseDuration", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "leaseDuration", {
/**
* When a blob is leased,
* specifies whether the lease is of infinite or fixed duration. Possible
@@ -45726,7 +45655,7 @@ var BlobQueryResponse = /** @class */ (function () {
*
* @readonly
* @type {(LeaseDurationType | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.leaseDuration;
@@ -45734,14 +45663,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "leaseState", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "leaseState", {
/**
* Lease state of the blob. Possible
* values include: 'available', 'leased', 'expired', 'breaking', 'broken'.
*
* @readonly
* @type {(LeaseStateType | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.leaseState;
@@ -45749,14 +45678,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "leaseStatus", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "leaseStatus", {
/**
* The current lease status of the
* blob. Possible values include: 'locked', 'unlocked'.
*
* @readonly
* @type {(LeaseStatusType | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.leaseStatus;
@@ -45764,14 +45693,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "date", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "date", {
/**
* A UTC date/time value generated by the service that
* indicates the time at which the response was initiated.
*
* @readonly
* @type {(Date | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.date;
@@ -45779,14 +45708,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "blobCommittedBlockCount", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "blobCommittedBlockCount", {
/**
* The number of committed blocks
* present in the blob. This header is returned only for append blobs.
*
* @readonly
* @type {(number | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.blobCommittedBlockCount;
@@ -45794,14 +45723,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "etag", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "etag", {
/**
* The ETag contains a value that you can use to
* perform operations conditionally, in quotes.
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.etag;
@@ -45809,13 +45738,27 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "errorCode", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "tagCount", {
+ /**
+ * The number of tags associated with the blob
+ *
+ * @readonly
+ * @type {(number | undefined)}
+ * @memberof BlobDownloadResponse
+ */
+ get: function () {
+ return this.originalResponse.tagCount;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobDownloadResponse.prototype, "errorCode", {
/**
* The error code.
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.errorCode;
@@ -45823,7 +45766,7 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "isServerEncrypted", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "isServerEncrypted", {
/**
* The value of this header is set to
* true if the file data and application metadata are completely encrypted
@@ -45833,7 +45776,7 @@ var BlobQueryResponse = /** @class */ (function () {
*
* @readonly
* @type {(boolean | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.isServerEncrypted;
@@ -45841,7 +45784,7 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "blobContentMD5", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "blobContentMD5", {
/**
* If the blob has a MD5 hash, and if
* request contains range header (Range or x-ms-range), this response header
@@ -45851,7 +45794,7 @@ var BlobQueryResponse = /** @class */ (function () {
*
* @readonly
* @type {(Uint8Array | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.blobContentMD5;
@@ -45859,7 +45802,7 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "lastModified", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "lastModified", {
/**
* Returns the date and time the file was last
* modified. Any operation that modifies the file or its properties updates
@@ -45867,7 +45810,7 @@ var BlobQueryResponse = /** @class */ (function () {
*
* @readonly
* @type {(Date | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.lastModified;
@@ -45875,14 +45818,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "metadata", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "metadata", {
/**
* A name-value pair
* to associate with a file storage object.
*
* @readonly
* @type {(Metadata | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.metadata;
@@ -45890,14 +45833,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "requestId", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "requestId", {
/**
* This header uniquely identifies the request
* that was made and can be used for troubleshooting the request.
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.requestId;
@@ -45905,14 +45848,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "clientRequestId", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "clientRequestId", {
/**
* If a client request id header is sent in the request, this header will be present in the
* response with the same value.
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.clientRequestId;
@@ -45920,14 +45863,14 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "version", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "version", {
/**
- * Indicates the version of the File service used
+ * Indicates the version of the Blob service used
* to execute the request.
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.version;
@@ -45935,14 +45878,28 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "encryptionKeySha256", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "versionId", {
+ /**
+ * Indicates the versionId of the downloaded blob version.
+ *
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobDownloadResponse
+ */
+ get: function () {
+ return this.originalResponse.versionId;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobDownloadResponse.prototype, "encryptionKeySha256", {
/**
* The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned
* when the blob was encrypted with a customer-provided key.
*
* @readonly
* @type {(string | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.encryptionKeySha256;
@@ -45950,7 +45907,7 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "contentCrc64", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "contentCrc64", {
/**
* If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to
* true, then the request returns a crc64 for the range, as long as the range size is less than
@@ -45958,7 +45915,7 @@ var BlobQueryResponse = /** @class */ (function () {
* specified in the same request, it will fail with 400(Bad Request)
*
* @type {(Uint8Array | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse.contentCrc64;
@@ -45966,31 +45923,73 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "blobBody", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "objectReplicationDestinationPolicyId", {
+ /**
+ * Object Replication Policy Id of the destination blob.
+ *
+ * @readonly
+ * @type {(string| undefined)}
+ * @memberof BlobDownloadResponse
+ */
+ get: function () {
+ return this.originalResponse.objectReplicationDestinationPolicyId;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobDownloadResponse.prototype, "objectReplicationSourceProperties", {
+ /**
+ * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.
+ *
+ * @readonly
+ * @type {(ObjectReplicationPolicy[] | undefined)}
+ * @memberof BlobDownloadResponse
+ */
+ get: function () {
+ return this.originalResponse.objectReplicationSourceProperties;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobDownloadResponse.prototype, "isSealed", {
+ /**
+ * If this blob has been sealed.
+ *
+ * @readonly
+ * @type {(boolean | undefined)}
+ * @memberof BlobDownloadResponse
+ */
+ get: function () {
+ return this.originalResponse.isSealed;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobDownloadResponse.prototype, "contentAsBlob", {
/**
* The response body as a browser Blob.
* Always undefined in node.js.
*
* @readonly
* @type {(Promise | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
- return undefined;
+ return this.originalResponse.blobBody;
},
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "readableStreamBody", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "readableStreamBody", {
/**
* The response body as a node.js Readable stream.
* Always undefined in the browser.
*
- * It will parse avor data returned by blob query.
+ * It will automatically retry when internal read stream unexpected ends.
*
* @readonly
* @type {(NodeJS.ReadableStream | undefined)}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return coreHttp.isNode ? this.blobDownloadStream : undefined;
@@ -45998,12 +45997,12 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobQueryResponse.prototype, "_response", {
+ Object.defineProperty(BlobDownloadResponse.prototype, "_response", {
/**
* The HTTP response.
*
* @type {HttpResponse}
- * @memberof BlobQueryResponse
+ * @memberof BlobDownloadResponse
*/
get: function () {
return this.originalResponse._response;
@@ -46011,3154 +46010,3023 @@ var BlobQueryResponse = /** @class */ (function () {
enumerable: false,
configurable: true
});
- return BlobQueryResponse;
+ return BlobDownloadResponse;
}());
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key.
- *
- * @export
- * @class StorageSharedKeyCredentialPolicy
- * @extends {CredentialPolicy}
- */
-var StorageSharedKeyCredentialPolicy = /** @class */ (function (_super) {
- tslib.__extends(StorageSharedKeyCredentialPolicy, _super);
- /**
- * Creates an instance of StorageSharedKeyCredentialPolicy.
- * @param {RequestPolicy} nextPolicy
- * @param {RequestPolicyOptions} options
- * @param {StorageSharedKeyCredential} factory
- * @memberof StorageSharedKeyCredentialPolicy
- */
- function StorageSharedKeyCredentialPolicy(nextPolicy, options, factory) {
- var _this = _super.call(this, nextPolicy, options) || this;
- _this.factory = factory;
- return _this;
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+var AVRO_SYNC_MARKER_SIZE = 16;
+var AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]);
+var AVRO_CODEC_KEY = "avro.codec";
+var AVRO_SCHEMA_KEY = "avro.schema";
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+function arraysEqual(a, b) {
+ if (a === b)
+ return true;
+ if (a == null || b == null)
+ return false;
+ if (a.length != b.length)
+ return false;
+ for (var i = 0; i < a.length; ++i) {
+ if (a[i] !== b[i])
+ return false;
+ }
+ return true;
+}
+
+// Copyright (c) Microsoft Corporation.
+var AvroParser = /** @class */ (function () {
+ function AvroParser() {
}
/**
- * Signs request.
+ * Reads a fixed number of bytes from the stream.
*
- * @protected
- * @param {WebResource} request
- * @returns {WebResource}
- * @memberof StorageSharedKeyCredentialPolicy
+ * @static
+ * @param {AvroReadable} [stream]
+ * @param {number} [length]
+ * @param {AvroParserReadOptions} [options={}]
+ * @returns {Promise}
+ * @memberof AvroParser
*/
- StorageSharedKeyCredentialPolicy.prototype.signRequest = function (request) {
- request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString());
- if (request.body && typeof request.body === "string" && request.body.length > 0) {
- request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body));
- }
- var stringToSign = [
- request.method.toUpperCase(),
- this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE),
- this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING),
- this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH),
- this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5),
- this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE),
- this.getHeaderValueToSign(request, HeaderConstants.DATE),
- this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE),
- this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH),
- this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH),
- this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE),
- this.getHeaderValueToSign(request, HeaderConstants.RANGE)
- ].join("\n") +
- "\n" +
- this.getCanonicalizedHeadersString(request) +
- this.getCanonicalizedResourceString(request);
- var signature = this.factory.computeHMACSHA256(stringToSign);
- request.headers.set(HeaderConstants.AUTHORIZATION, "SharedKey " + this.factory.accountName + ":" + signature);
- // console.log(`[URL]:${request.url}`);
- // console.log(`[HEADERS]:${request.headers.toString()}`);
- // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`);
- // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);
- return request;
+ AvroParser.readFixedBytes = function (stream, length, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var bytes;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, stream.read(length, { abortSignal: options.abortSignal })];
+ case 1:
+ bytes = _a.sent();
+ if (bytes.length != length) {
+ throw new Error("Hit stream end.");
+ }
+ return [2 /*return*/, bytes];
+ }
+ });
+ });
};
/**
- * Retrieve header value according to shared key sign rules.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key
+ * Reads a single byte from the stream.
*
- * @private
- * @param {WebResource} request
- * @param {string} headerName
- * @returns {string}
- * @memberof StorageSharedKeyCredentialPolicy
+ * @static
+ * @param {AvroReadable} [stream]
+ * @param {AvroParserReadOptions} [options={}]
+ * @returns {Promise}
+ * @memberof AvroParser
*/
- StorageSharedKeyCredentialPolicy.prototype.getHeaderValueToSign = function (request, headerName) {
- var value = request.headers.get(headerName);
- if (!value) {
- return "";
- }
- // When using version 2015-02-21 or later, if Content-Length is zero, then
- // set the Content-Length part of the StringToSign to an empty string.
- // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key
- if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") {
- return "";
- }
- return value;
+ AvroParser.readByte = function (stream, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var buf;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 1, options)];
+ case 1:
+ buf = _a.sent();
+ return [2 /*return*/, buf[0]];
+ }
+ });
+ });
};
- /**
- * To construct the CanonicalizedHeaders portion of the signature string, follow these steps:
- * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header.
- * 2. Convert each HTTP header name to lowercase.
- * 3. Sort the headers lexicographically by header name, in ascending order.
- * Each header may appear only once in the string.
- * 4. Replace any linear whitespace in the header value with a single space.
- * 5. Trim any whitespace around the colon in the header.
- * 6. Finally, append a new-line character to each canonicalized header in the resulting list.
- * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string.
- *
- * @private
- * @param {WebResource} request
- * @returns {string}
- * @memberof StorageSharedKeyCredentialPolicy
- */
- StorageSharedKeyCredentialPolicy.prototype.getCanonicalizedHeadersString = function (request) {
- var headersArray = request.headers.headersArray().filter(function (value) {
- return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE);
+ // int and long are stored in variable-length zig-zag coding.
+ // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt
+ // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types
+ AvroParser.readZigZagLong = function (stream, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var zigZagEncoded, significanceInBit, byte, haveMoreByte, significanceInFloat, res;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ zigZagEncoded = 0;
+ significanceInBit = 0;
+ _a.label = 1;
+ case 1: return [4 /*yield*/, AvroParser.readByte(stream, options)];
+ case 2:
+ byte = _a.sent();
+ haveMoreByte = byte & 0x80;
+ zigZagEncoded |= (byte & 0x7f) << significanceInBit;
+ significanceInBit += 7;
+ _a.label = 3;
+ case 3:
+ if (haveMoreByte && significanceInBit < 28) return [3 /*break*/, 1];
+ _a.label = 4;
+ case 4:
+ if (!haveMoreByte) return [3 /*break*/, 9];
+ // Switch to float arithmetic
+ zigZagEncoded = zigZagEncoded;
+ significanceInFloat = 268435456; // 2 ** 28.
+ _a.label = 5;
+ case 5: return [4 /*yield*/, AvroParser.readByte(stream, options)];
+ case 6:
+ byte = _a.sent();
+ zigZagEncoded += (byte & 0x7f) * significanceInFloat;
+ significanceInFloat *= 128; // 2 ** 7
+ _a.label = 7;
+ case 7:
+ if (byte & 0x80) return [3 /*break*/, 5];
+ _a.label = 8;
+ case 8:
+ res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2;
+ if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) {
+ throw new Error("Integer overflow.");
+ }
+ return [2 /*return*/, res];
+ case 9: return [2 /*return*/, (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1)];
+ }
+ });
});
- headersArray.sort(function (a, b) {
- return a.name.toLowerCase().localeCompare(b.name.toLowerCase());
+ };
+ AvroParser.readLong = function (stream, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ return tslib.__generator(this, function (_a) {
+ return [2 /*return*/, AvroParser.readZigZagLong(stream, options)];
+ });
});
- // Remove duplicate headers
- headersArray = headersArray.filter(function (value, index, array) {
- if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) {
- return false;
- }
- return true;
+ };
+ AvroParser.readInt = function (stream, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ return tslib.__generator(this, function (_a) {
+ return [2 /*return*/, AvroParser.readZigZagLong(stream, options)];
+ });
});
- var canonicalizedHeadersStringToSign = "";
- headersArray.forEach(function (header) {
- canonicalizedHeadersStringToSign += header.name
- .toLowerCase()
- .trimRight() + ":" + header.value.trimLeft() + "\n";
+ };
+ AvroParser.readNull = function () {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ return tslib.__generator(this, function (_a) {
+ return [2 /*return*/, null];
+ });
});
- return canonicalizedHeadersStringToSign;
};
- /**
- * Retrieves the webResource canonicalized resource string.
- *
- * @private
- * @param {WebResource} request
- * @returns {string}
- * @memberof StorageSharedKeyCredentialPolicy
- */
- StorageSharedKeyCredentialPolicy.prototype.getCanonicalizedResourceString = function (request) {
- var path = getURLPath(request.url) || "/";
- var canonicalizedResourceString = "";
- canonicalizedResourceString += "/" + this.factory.accountName + path;
- var queries = getURLQueries(request.url);
- var lowercaseQueries = {};
- if (queries) {
- var queryKeys = [];
- for (var key in queries) {
- if (queries.hasOwnProperty(key)) {
- var lowercaseKey = key.toLowerCase();
- lowercaseQueries[lowercaseKey] = queries[key];
- queryKeys.push(lowercaseKey);
+ AvroParser.readBoolean = function (stream, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var b;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, AvroParser.readByte(stream, options)];
+ case 1:
+ b = _a.sent();
+ if (b == 1) {
+ return [2 /*return*/, true];
+ }
+ else if (b == 0) {
+ return [2 /*return*/, false];
+ }
+ else {
+ throw new Error("Byte was not a boolean.");
+ }
}
- }
- queryKeys.sort();
- for (var _i = 0, queryKeys_1 = queryKeys; _i < queryKeys_1.length; _i++) {
- var key = queryKeys_1[_i];
- canonicalizedResourceString += "\n" + key + ":" + decodeURIComponent(lowercaseQueries[key]);
- }
- }
- return canonicalizedResourceString;
+ });
+ });
};
- return StorageSharedKeyCredentialPolicy;
-}(CredentialPolicy));
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- *
- * StorageSharedKeyCredential for account key authorization of Azure Storage service.
- *
- * @export
- * @class StorageSharedKeyCredential
- * @extends {Credential}
- */
-var StorageSharedKeyCredential = /** @class */ (function (_super) {
- tslib.__extends(StorageSharedKeyCredential, _super);
- /**
- * Creates an instance of StorageSharedKeyCredential.
- * @param {string} accountName
- * @param {string} accountKey
- * @memberof StorageSharedKeyCredential
- */
- function StorageSharedKeyCredential(accountName, accountKey) {
- var _this = _super.call(this) || this;
- _this.accountName = accountName;
- _this.accountKey = Buffer.from(accountKey, "base64");
- return _this;
- }
- /**
- * Creates a StorageSharedKeyCredentialPolicy object.
- *
- * @param {RequestPolicy} nextPolicy
- * @param {RequestPolicyOptions} options
- * @returns {StorageSharedKeyCredentialPolicy}
- * @memberof StorageSharedKeyCredential
- */
- StorageSharedKeyCredential.prototype.create = function (nextPolicy, options) {
- return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this);
+ AvroParser.readFloat = function (stream, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var u8arr, view;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 4, options)];
+ case 1:
+ u8arr = _a.sent();
+ view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);
+ return [2 /*return*/, view.getFloat32(0, true)]; // littleEndian = true
+ }
+ });
+ });
};
- /**
- * Generates a hash signature for an HTTP request or for a SAS.
- *
- * @param {string} stringToSign
- * @returns {string}
- * @memberof StorageSharedKeyCredential
- */
- StorageSharedKeyCredential.prototype.computeHMACSHA256 = function (stringToSign) {
- return crypto.createHmac("sha256", this.accountKey)
- .update(stringToSign, "utf8")
- .digest("base64");
+ AvroParser.readDouble = function (stream, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var u8arr, view;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, AvroParser.readFixedBytes(stream, 8, options)];
+ case 1:
+ u8arr = _a.sent();
+ view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);
+ return [2 /*return*/, view.getFloat64(0, true)]; // littleEndian = true
+ }
+ });
+ });
};
- return StorageSharedKeyCredential;
-}(Credential));
-
-/*
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the MIT License. See License.txt in the project root for
- * license information.
- *
- * Code generated by Microsoft (R) AutoRest Code Generator.
- * Changes may cause incorrect behavior and will be lost if the code is
- * regenerated.
- */
-var packageName = "azure-storage-blob";
-var packageVersion = "12.2.1";
-var StorageClientContext = /** @class */ (function (_super) {
- tslib.__extends(StorageClientContext, _super);
+ AvroParser.readBytes = function (stream, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var size;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, AvroParser.readLong(stream, options)];
+ case 1:
+ size = _a.sent();
+ if (size < 0) {
+ throw new Error("Bytes size was negative.");
+ }
+ return [4 /*yield*/, stream.read(size, { abortSignal: options.abortSignal })];
+ case 2: return [2 /*return*/, _a.sent()];
+ }
+ });
+ });
+ };
+ AvroParser.readString = function (stream, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var u8arr, utf8decoder;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, AvroParser.readBytes(stream, options)];
+ case 1:
+ u8arr = _a.sent();
+ // polyfill TextDecoder to be backward compatible with older
+ // nodejs that doesn't expose TextDecoder as a global variable
+ if (typeof TextDecoder === "undefined" && "function" !== "undefined") {
+ global.TextDecoder = __webpack_require__(669).TextDecoder;
+ }
+ utf8decoder = new TextDecoder();
+ return [2 /*return*/, utf8decoder.decode(u8arr)];
+ }
+ });
+ });
+ };
+ AvroParser.readMapPair = function (stream, readItemMethod, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var key, value;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, AvroParser.readString(stream, options)];
+ case 1:
+ key = _a.sent();
+ return [4 /*yield*/, readItemMethod(stream, options)];
+ case 2:
+ value = _a.sent();
+ return [2 /*return*/, { key: key, value: value }];
+ }
+ });
+ });
+ };
+ AvroParser.readMap = function (stream, readItemMethod, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var readPairMethod, pairs, dict, _i, pairs_1, pair;
+ var _this = this;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ readPairMethod = function (stream, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(_this, void 0, void 0, function () {
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, AvroParser.readMapPair(stream, readItemMethod, options)];
+ case 1: return [2 /*return*/, _a.sent()];
+ }
+ });
+ });
+ };
+ return [4 /*yield*/, AvroParser.readArray(stream, readPairMethod, options)];
+ case 1:
+ pairs = _a.sent();
+ dict = {};
+ for (_i = 0, pairs_1 = pairs; _i < pairs_1.length; _i++) {
+ pair = pairs_1[_i];
+ dict[pair.key] = pair.value;
+ }
+ return [2 /*return*/, dict];
+ }
+ });
+ });
+ };
+ AvroParser.readArray = function (stream, readItemMethod, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var items, count, item;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ items = [];
+ return [4 /*yield*/, AvroParser.readLong(stream, options)];
+ case 1:
+ count = _a.sent();
+ _a.label = 2;
+ case 2:
+ if (!(count != 0)) return [3 /*break*/, 8];
+ if (!(count < 0)) return [3 /*break*/, 4];
+ // Ignore block sizes
+ return [4 /*yield*/, AvroParser.readLong(stream, options)];
+ case 3:
+ // Ignore block sizes
+ _a.sent();
+ count = -count;
+ _a.label = 4;
+ case 4:
+ if (!count--) return [3 /*break*/, 6];
+ return [4 /*yield*/, readItemMethod(stream, options)];
+ case 5:
+ item = _a.sent();
+ items.push(item);
+ return [3 /*break*/, 4];
+ case 6: return [4 /*yield*/, AvroParser.readLong(stream, options)];
+ case 7:
+ count = _a.sent();
+ return [3 /*break*/, 2];
+ case 8: return [2 /*return*/, items];
+ }
+ });
+ });
+ };
+ return AvroParser;
+}());
+var AvroComplex;
+(function (AvroComplex) {
+ AvroComplex["RECORD"] = "record";
+ AvroComplex["ENUM"] = "enum";
+ AvroComplex["ARRAY"] = "array";
+ AvroComplex["MAP"] = "map";
+ AvroComplex["UNION"] = "union";
+ AvroComplex["FIXED"] = "fixed";
+})(AvroComplex || (AvroComplex = {}));
+var AvroType = /** @class */ (function () {
+ function AvroType() {
+ }
/**
- * Initializes a new instance of the StorageClientContext class.
- * @param url The URL of the service account, container, or blob that is the targe of the desired
- * operation.
- * @param [options] The parameter options
+ * Determines the AvroType from the Avro Schema.
*/
- function StorageClientContext(url, options) {
- var _this = this;
- if (url == undefined) {
- throw new Error("'url' cannot be null.");
+ AvroType.fromSchema = function (schema) {
+ if (typeof schema == "string") {
+ return AvroType.fromStringSchema(schema);
}
- if (!options) {
- options = {};
+ else if (Array.isArray(schema)) {
+ return AvroType.fromArraySchema(schema);
}
- if (!options.userAgent) {
- var defaultUserAgent = coreHttp.getDefaultUserAgentValue();
- options.userAgent = packageName + "/" + packageVersion + " " + defaultUserAgent;
+ else {
+ return AvroType.fromObjectSchema(schema);
}
- _this = _super.call(this, undefined, options) || this;
- _this.version = "2019-12-12";
- _this.baseUri = "{url}";
- _this.requestContentType = "application/json; charset=utf-8";
- _this.url = url;
- return _this;
- }
- return StorageClientContext;
-}(coreHttp.ServiceClient));
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-(function (BlockBlobTier) {
- /**
- * Optimized for storing data that is accessed frequently.
- */
- BlockBlobTier["Hot"] = "Hot";
- /**
- * Optimized for storing data that is infrequently accessed and stored for at least 30 days.
- */
- BlockBlobTier["Cool"] = "Cool";
- /**
- * Optimized for storing data that is rarely accessed and stored for at least 180 days
- * with flexible latency requirements (on the order of hours).
- */
- BlockBlobTier["Archive"] = "Archive";
-})(exports.BlockBlobTier || (exports.BlockBlobTier = {}));
-(function (PremiumPageBlobTier) {
- /**
- * P4 Tier.
- */
- PremiumPageBlobTier["P4"] = "P4";
- /**
- * P6 Tier.
- */
- PremiumPageBlobTier["P6"] = "P6";
- /**
- * P10 Tier.
- */
- PremiumPageBlobTier["P10"] = "P10";
- /**
- * P15 Tier.
- */
- PremiumPageBlobTier["P15"] = "P15";
- /**
- * P20 Tier.
- */
- PremiumPageBlobTier["P20"] = "P20";
- /**
- * P30 Tier.
- */
- PremiumPageBlobTier["P30"] = "P30";
- /**
- * P40 Tier.
- */
- PremiumPageBlobTier["P40"] = "P40";
- /**
- * P50 Tier.
- */
- PremiumPageBlobTier["P50"] = "P50";
- /**
- * P60 Tier.
- */
- PremiumPageBlobTier["P60"] = "P60";
- /**
- * P70 Tier.
- */
- PremiumPageBlobTier["P70"] = "P70";
- /**
- * P80 Tier.
- */
- PremiumPageBlobTier["P80"] = "P80";
-})(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {}));
-function toAccessTier(tier) {
- if (tier == undefined) {
- return undefined;
- }
- return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service).
-}
-function ensureCpkIfSpecified(cpk, isHttps) {
- if (cpk && !isHttps) {
- throw new RangeError("Customer-provided encryption key must be used over HTTPS.");
- }
- if (cpk && !cpk.encryptionAlgorithm) {
- cpk.encryptionAlgorithm = EncryptionAlgorithmAES25;
- }
-}
-
-/**
- * Function that converts PageRange and ClearRange to a common Range object.
- * PageRange and ClearRange have start and end while Range offset and count
- * this function normalizes to Range.
- * @param response Model PageBlob Range response
- */
-function rangeResponseFromModel(response) {
- var pageRange = (response._response.parsedBody.pageRange || []).map(function (x) { return ({
- offset: x.start,
- count: x.end - x.start
- }); });
- var clearRange = (response._response.parsedBody.clearRange || []).map(function (x) { return ({
- offset: x.start,
- count: x.end - x.start
- }); });
- return tslib.__assign(tslib.__assign({}, response), { pageRange: pageRange,
- clearRange: clearRange, _response: tslib.__assign(tslib.__assign({}, response._response), { parsedBody: {
- pageRange: pageRange,
- clearRange: clearRange
- } }) });
-}
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * This is the poller returned by {@link BlobClient.beginCopyFromURL}.
- * This can not be instantiated directly outside of this package.
- *
- * @ignore
- */
-var BlobBeginCopyFromUrlPoller = /** @class */ (function (_super) {
- tslib.__extends(BlobBeginCopyFromUrlPoller, _super);
- function BlobBeginCopyFromUrlPoller(options) {
- var _this = this;
- var blobClient = options.blobClient, copySource = options.copySource, _a = options.intervalInMs, intervalInMs = _a === void 0 ? 15000 : _a, onProgress = options.onProgress, resumeFrom = options.resumeFrom, startCopyFromURLOptions = options.startCopyFromURLOptions;
- var state;
- if (resumeFrom) {
- state = JSON.parse(resumeFrom).state;
+ };
+ AvroType.fromStringSchema = function (schema) {
+ switch (schema) {
+ case AvroPrimitive.NULL:
+ case AvroPrimitive.BOOLEAN:
+ case AvroPrimitive.INT:
+ case AvroPrimitive.LONG:
+ case AvroPrimitive.FLOAT:
+ case AvroPrimitive.DOUBLE:
+ case AvroPrimitive.BYTES:
+ case AvroPrimitive.STRING:
+ return new AvroPrimitiveType(schema);
+ default:
+ throw new Error("Unexpected Avro type " + schema);
}
- var operation = makeBlobBeginCopyFromURLPollOperation(tslib.__assign(tslib.__assign({}, state), { blobClient: blobClient,
- copySource: copySource,
- startCopyFromURLOptions: startCopyFromURLOptions }));
- _this = _super.call(this, operation) || this;
- if (typeof onProgress === "function") {
- _this.onProgress(onProgress);
+ };
+ AvroType.fromArraySchema = function (schema) {
+ return new AvroUnionType(schema.map(AvroType.fromSchema));
+ };
+ AvroType.fromObjectSchema = function (schema) {
+ var type = schema.type;
+ // Primitives can be defined as strings or objects
+ try {
+ return AvroType.fromStringSchema(type);
}
- _this.intervalInMs = intervalInMs;
+ catch (err) { }
+ switch (type) {
+ case AvroComplex.RECORD:
+ if (schema.aliases) {
+ throw new Error("aliases currently is not supported, schema: " + schema);
+ }
+ if (!schema.name) {
+ throw new Error("Required attribute 'name' doesn't exist on schema: " + schema);
+ }
+ var fields = {};
+ if (!schema.fields) {
+ throw new Error("Required attribute 'fields' doesn't exist on schema: " + schema);
+ }
+ for (var _i = 0, _a = schema.fields; _i < _a.length; _i++) {
+ var field = _a[_i];
+ fields[field.name] = AvroType.fromSchema(field.type);
+ }
+ return new AvroRecordType(fields, schema.name);
+ case AvroComplex.ENUM:
+ if (schema.aliases) {
+ throw new Error("aliases currently is not supported, schema: " + schema);
+ }
+ if (!schema.symbols) {
+ throw new Error("Required attribute 'symbols' doesn't exist on schema: " + schema);
+ }
+ return new AvroEnumType(schema.symbols);
+ case AvroComplex.MAP:
+ if (!schema.values) {
+ throw new Error("Required attribute 'values' doesn't exist on schema: " + schema);
+ }
+ return new AvroMapType(AvroType.fromSchema(schema.values));
+ case AvroComplex.ARRAY: // Unused today
+ case AvroComplex.FIXED: // Unused today
+ default:
+ throw new Error("Unexpected Avro type " + type + " in " + schema);
+ }
+ };
+ return AvroType;
+}());
+var AvroPrimitive;
+(function (AvroPrimitive) {
+ AvroPrimitive["NULL"] = "null";
+ AvroPrimitive["BOOLEAN"] = "boolean";
+ AvroPrimitive["INT"] = "int";
+ AvroPrimitive["LONG"] = "long";
+ AvroPrimitive["FLOAT"] = "float";
+ AvroPrimitive["DOUBLE"] = "double";
+ AvroPrimitive["BYTES"] = "bytes";
+ AvroPrimitive["STRING"] = "string";
+})(AvroPrimitive || (AvroPrimitive = {}));
+var AvroPrimitiveType = /** @class */ (function (_super) {
+ tslib.__extends(AvroPrimitiveType, _super);
+ function AvroPrimitiveType(primitive) {
+ var _this = _super.call(this) || this;
+ _this._primitive = primitive;
return _this;
}
- BlobBeginCopyFromUrlPoller.prototype.delay = function () {
- return coreHttp.delay(this.intervalInMs);
+ AvroPrimitiveType.prototype.read = function (stream, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _a;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ _a = this._primitive;
+ switch (_a) {
+ case AvroPrimitive.NULL: return [3 /*break*/, 1];
+ case AvroPrimitive.BOOLEAN: return [3 /*break*/, 3];
+ case AvroPrimitive.INT: return [3 /*break*/, 5];
+ case AvroPrimitive.LONG: return [3 /*break*/, 7];
+ case AvroPrimitive.FLOAT: return [3 /*break*/, 9];
+ case AvroPrimitive.DOUBLE: return [3 /*break*/, 11];
+ case AvroPrimitive.BYTES: return [3 /*break*/, 13];
+ case AvroPrimitive.STRING: return [3 /*break*/, 15];
+ }
+ return [3 /*break*/, 17];
+ case 1: return [4 /*yield*/, AvroParser.readNull()];
+ case 2: return [2 /*return*/, _b.sent()];
+ case 3: return [4 /*yield*/, AvroParser.readBoolean(stream, options)];
+ case 4: return [2 /*return*/, _b.sent()];
+ case 5: return [4 /*yield*/, AvroParser.readInt(stream, options)];
+ case 6: return [2 /*return*/, _b.sent()];
+ case 7: return [4 /*yield*/, AvroParser.readLong(stream, options)];
+ case 8: return [2 /*return*/, _b.sent()];
+ case 9: return [4 /*yield*/, AvroParser.readFloat(stream, options)];
+ case 10: return [2 /*return*/, _b.sent()];
+ case 11: return [4 /*yield*/, AvroParser.readDouble(stream, options)];
+ case 12: return [2 /*return*/, _b.sent()];
+ case 13: return [4 /*yield*/, AvroParser.readBytes(stream, options)];
+ case 14: return [2 /*return*/, _b.sent()];
+ case 15: return [4 /*yield*/, AvroParser.readString(stream, options)];
+ case 16: return [2 /*return*/, _b.sent()];
+ case 17: throw new Error("Unknown Avro Primitive");
+ }
+ });
+ });
};
- return BlobBeginCopyFromUrlPoller;
-}(coreLro.Poller));
-/**
- * Note: Intentionally using function expression over arrow function expression
- * so that the function can be invoked with a different context.
- * This affects what `this` refers to.
- * @ignore
- */
-var cancel = function cancel(options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var state, copyId;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- state = this.state;
- copyId = state.copyId;
- if (state.isCompleted) {
- return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];
- }
- if (!copyId) {
- state.isCancelled = true;
- return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];
- }
- // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call
- return [4 /*yield*/, state.blobClient.abortCopyFromURL(copyId, {
- abortSignal: options.abortSignal
- })];
- case 1:
- // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call
- _a.sent();
- state.isCancelled = true;
- return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];
- }
- });
- });
-};
-/**
- * Note: Intentionally using function expression over arrow function expression
- * so that the function can be invoked with a different context.
- * This affects what `this` refers to.
- * @ignore
- */
-var update = function update(options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var state, blobClient, copySource, startCopyFromURLOptions, result, result, copyStatus, copyProgress, prevCopyProgress, err_1;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- state = this.state;
- blobClient = state.blobClient, copySource = state.copySource, startCopyFromURLOptions = state.startCopyFromURLOptions;
- if (!!state.isStarted) return [3 /*break*/, 2];
- state.isStarted = true;
- return [4 /*yield*/, blobClient.startCopyFromURL(copySource, startCopyFromURLOptions)];
- case 1:
- result = _a.sent();
- // copyId is needed to abort
- state.copyId = result.copyId;
- if (result.copyStatus === "success") {
- state.result = result;
- state.isCompleted = true;
- }
- return [3 /*break*/, 6];
- case 2:
- if (!!state.isCompleted) return [3 /*break*/, 6];
- _a.label = 3;
- case 3:
- _a.trys.push([3, 5, , 6]);
- return [4 /*yield*/, state.blobClient.getProperties({ abortSignal: options.abortSignal })];
- case 4:
- result = _a.sent();
- copyStatus = result.copyStatus, copyProgress = result.copyProgress;
- prevCopyProgress = state.copyProgress;
- if (copyProgress) {
- state.copyProgress = copyProgress;
- }
- if (copyStatus === "pending" &&
- copyProgress !== prevCopyProgress &&
- typeof options.fireProgress === "function") {
- // trigger in setTimeout, or swallow error?
- options.fireProgress(state);
- }
- else if (copyStatus === "success") {
- state.result = result;
- state.isCompleted = true;
- }
- else if (copyStatus === "failed") {
- state.error = new Error("Blob copy failed with reason: \"" + (result.copyStatusDescription || "unknown") + "\"");
- state.isCompleted = true;
- }
- return [3 /*break*/, 6];
- case 5:
- err_1 = _a.sent();
- state.error = err_1;
- state.isCompleted = true;
- return [3 /*break*/, 6];
- case 6: return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];
- }
+ return AvroPrimitiveType;
+}(AvroType));
+var AvroEnumType = /** @class */ (function (_super) {
+ tslib.__extends(AvroEnumType, _super);
+ function AvroEnumType(symbols) {
+ var _this = _super.call(this) || this;
+ _this._symbols = symbols;
+ return _this;
+ }
+ AvroEnumType.prototype.read = function (stream, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var value;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, AvroParser.readInt(stream, options)];
+ case 1:
+ value = _a.sent();
+ return [2 /*return*/, this._symbols[value]];
+ }
+ });
});
- });
-};
-/**
- * Note: Intentionally using function expression over arrow function expression
- * so that the function can be invoked with a different context.
- * This affects what `this` refers to.
- * @ignore
- */
-var toString = function toString() {
- return JSON.stringify({ state: this.state }, function (key, value) {
- // remove blobClient from serialized state since a client can't be hydrated from this info.
- if (key === "blobClient") {
- return undefined;
- }
- return value;
- });
-};
-/**
- * Creates a poll operation given the provided state.
- * @ignore
- */
-function makeBlobBeginCopyFromURLPollOperation(state) {
- return {
- state: tslib.__assign({}, state),
- cancel: cancel,
- toString: toString,
- update: update
};
-}
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-/**
- * Generate a range string. For example:
- *
- * "bytes=255-" or "bytes=0-511"
- *
- * @export
- * @param {Range} iRange
- * @returns {string}
- */
-function rangeToString(iRange) {
- if (iRange.offset < 0) {
- throw new RangeError("Range.offset cannot be smaller than 0.");
- }
- if (iRange.count && iRange.count <= 0) {
- throw new RangeError("Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.");
- }
- return iRange.count
- ? "bytes=" + iRange.offset + "-" + (iRange.offset + iRange.count - 1)
- : "bytes=" + iRange.offset + "-";
-}
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient}
- * and etc.
- *
- * @export
- * @class StorageClient
- */
-var StorageClient = /** @class */ (function () {
- /**
- * Creates an instance of StorageClient.
- * @param {string} url url to resource
- * @param {Pipeline} pipeline request policy pipeline.
- * @memberof StorageClient
- */
- function StorageClient(url, pipeline) {
- // URL should be encoded and only once, protocol layer shouldn't encode URL again
- this.url = escapeURLPath(url);
- this.accountName = getAccountNameFromUrl(url);
- this.pipeline = pipeline;
- this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions());
- this.isHttps = iEqual(getURLScheme(this.url) || "", "https");
- this.credential = new AnonymousCredential();
- for (var _i = 0, _a = this.pipeline.factories; _i < _a.length; _i++) {
- var factory = _a[_i];
- if ((coreHttp.isNode && factory instanceof StorageSharedKeyCredential) ||
- factory instanceof AnonymousCredential ||
- coreHttp.isTokenCredential(factory)) {
- this.credential = factory;
- }
- }
- // Override protocol layer's default content-type
- var storageClientContext = this.storageClientContext;
- storageClientContext.requestContentType = undefined;
- }
- return StorageClient;
-}());
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * States for Batch.
- *
- * @enum {number}
- */
-var BatchStates;
-(function (BatchStates) {
- BatchStates[BatchStates["Good"] = 0] = "Good";
- BatchStates[BatchStates["Error"] = 1] = "Error";
-})(BatchStates || (BatchStates = {}));
-/**
- * Batch provides basic parallel execution with concurrency limits.
- * Will stop execute left operations when one of the executed operation throws an error.
- * But Batch cannot cancel ongoing operations, you need to cancel them by yourself.
- *
- * @export
- * @class Batch
- */
-var Batch = /** @class */ (function () {
- /**
- * Creates an instance of Batch.
- * @param {number} [concurrency=5]
- * @memberof Batch
- */
- function Batch(concurrency) {
- if (concurrency === void 0) { concurrency = 5; }
- /**
- * Number of active operations under execution.
- *
- * @private
- * @type {number}
- * @memberof Batch
- */
- this.actives = 0;
- /**
- * Number of completed operations under execution.
- *
- * @private
- * @type {number}
- * @memberof Batch
- */
- this.completed = 0;
- /**
- * Offset of next operation to be executed.
- *
- * @private
- * @type {number}
- * @memberof Batch
- */
- this.offset = 0;
- /**
- * Operation array to be executed.
- *
- * @private
- * @type {Operation[]}
- * @memberof Batch
- */
- this.operations = [];
- /**
- * States of Batch. When an error happens, state will turn into error.
- * Batch will stop execute left operations.
- *
- * @private
- * @type {BatchStates}
- * @memberof Batch
- */
- this.state = BatchStates.Good;
- if (concurrency < 1) {
- throw new RangeError("concurrency must be larger than 0");
- }
- this.concurrency = concurrency;
- this.emitter = new events.EventEmitter();
+ return AvroEnumType;
+}(AvroType));
+var AvroUnionType = /** @class */ (function (_super) {
+ tslib.__extends(AvroUnionType, _super);
+ function AvroUnionType(types) {
+ var _this = _super.call(this) || this;
+ _this._types = types;
+ return _this;
}
- /**
- * Add a operation into queue.
- *
- * @param {Operation} operation
- * @memberof Batch
- */
- Batch.prototype.addOperation = function (operation) {
- var _this = this;
- this.operations.push(function () { return tslib.__awaiter(_this, void 0, void 0, function () {
- var error_1;
+ AvroUnionType.prototype.read = function (stream, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var typeIndex;
return tslib.__generator(this, function (_a) {
switch (_a.label) {
- case 0:
- _a.trys.push([0, 2, , 3]);
- this.actives++;
- return [4 /*yield*/, operation()];
+ case 0: return [4 /*yield*/, AvroParser.readInt(stream, options)];
case 1:
- _a.sent();
- this.actives--;
- this.completed++;
- this.parallelExecute();
- return [3 /*break*/, 3];
- case 2:
- error_1 = _a.sent();
- this.emitter.emit("error", error_1);
- return [3 /*break*/, 3];
- case 3: return [2 /*return*/];
+ typeIndex = _a.sent();
+ return [4 /*yield*/, this._types[typeIndex].read(stream, options)];
+ case 2: return [2 /*return*/, _a.sent()];
}
});
- }); });
+ });
};
- /**
- * Start execute operations in the queue.
- *
- * @returns {Promise}
- * @memberof Batch
- */
- Batch.prototype.do = function () {
+ return AvroUnionType;
+}(AvroType));
+var AvroMapType = /** @class */ (function (_super) {
+ tslib.__extends(AvroMapType, _super);
+ function AvroMapType(itemType) {
+ var _this = _super.call(this) || this;
+ _this._itemType = itemType;
+ return _this;
+ }
+ AvroMapType.prototype.read = function (stream, options) {
+ if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
+ var readItemMethod;
var _this = this;
return tslib.__generator(this, function (_a) {
- if (this.operations.length === 0) {
- return [2 /*return*/, Promise.resolve()];
+ switch (_a.label) {
+ case 0:
+ readItemMethod = function (s, options) { return tslib.__awaiter(_this, void 0, void 0, function () {
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, this._itemType.read(s, options)];
+ case 1: return [2 /*return*/, _a.sent()];
+ }
+ });
+ }); };
+ return [4 /*yield*/, AvroParser.readMap(stream, readItemMethod, options)];
+ case 1: return [2 /*return*/, _a.sent()];
}
- this.parallelExecute();
- return [2 /*return*/, new Promise(function (resolve, reject) {
- _this.emitter.on("finish", resolve);
- _this.emitter.on("error", function (error) {
- _this.state = BatchStates.Error;
- reject(error);
- });
- })];
});
});
};
- /**
- * Get next operation to be executed. Return null when reaching ends.
- *
- * @private
- * @returns {(Operation | null)}
- * @memberof Batch
- */
- Batch.prototype.nextOperation = function () {
- if (this.offset < this.operations.length) {
- return this.operations[this.offset++];
- }
- return null;
- };
- /**
- * Start execute operations. One one the most important difference between
- * this method with do() is that do() wraps as an sync method.
- *
- * @private
- * @returns {void}
- * @memberof Batch
- */
- Batch.prototype.parallelExecute = function () {
- if (this.state === BatchStates.Error) {
- return;
- }
- if (this.completed >= this.operations.length) {
- this.emitter.emit("finish");
- return;
- }
- while (this.actives < this.concurrency) {
- var operation = this.nextOperation();
- if (operation) {
- operation();
- }
- else {
- return;
- }
- }
- };
- return Batch;
-}());
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * This class generates a readable stream from the data in an array of buffers.
- *
- * @export
- * @class BuffersStream
- */
-var BuffersStream = /** @class */ (function (_super) {
- tslib.__extends(BuffersStream, _super);
- /**
- * Creates an instance of BuffersStream that will emit the data
- * contained in the array of buffers.
- *
- * @param {Buffer[]} buffers Array of buffers containing the data
- * @param {number} byteLength The total length of data contained in the buffers
- * @memberof BuffersStream
- */
- function BuffersStream(buffers, byteLength, options) {
- var _this = _super.call(this, options) || this;
- _this.buffers = buffers;
- _this.byteLength = byteLength;
- _this.byteOffsetInCurrentBuffer = 0;
- _this.bufferIndex = 0;
- _this.pushedBytesLength = 0;
- // check byteLength is no larger than buffers[] total length
- var buffersLength = 0;
- for (var _i = 0, _a = _this.buffers; _i < _a.length; _i++) {
- var buf = _a[_i];
- buffersLength += buf.byteLength;
- }
- if (buffersLength < _this.byteLength) {
- throw new Error("Data size shouldn't be larger than the total length of buffers.");
- }
+ return AvroMapType;
+}(AvroType));
+var AvroRecordType = /** @class */ (function (_super) {
+ tslib.__extends(AvroRecordType, _super);
+ function AvroRecordType(fields, name) {
+ var _this = _super.call(this) || this;
+ _this._fields = fields;
+ _this._name = name;
return _this;
}
- /**
- * Internal _read() that will be called when the stream wants to pull more data in.
- *
- * @param {number} size Optional. The size of data to be read
- * @memberof BuffersStream
- */
- BuffersStream.prototype._read = function (size) {
- if (this.pushedBytesLength >= this.byteLength) {
- this.push(null);
- }
- if (!size) {
- size = this.readableHighWaterMark;
- }
- var outBuffers = [];
- var i = 0;
- while (i < size && this.pushedBytesLength < this.byteLength) {
- // The last buffer may be longer than the data it contains.
- var remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength;
- var remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer;
- var remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers);
- if (remaining > size - i) {
- // chunkSize = size - i
- var end = this.byteOffsetInCurrentBuffer + size - i;
- outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));
- this.pushedBytesLength += size - i;
- this.byteOffsetInCurrentBuffer = end;
- i = size;
- break;
- }
- else {
- // chunkSize = remaining
- var end = this.byteOffsetInCurrentBuffer + remaining;
- outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));
- if (remaining === remainingCapacityInThisBuffer) {
- // this.buffers[this.bufferIndex] used up, shift to next one
- this.byteOffsetInCurrentBuffer = 0;
- this.bufferIndex++;
- }
- else {
- this.byteOffsetInCurrentBuffer = end;
+ AvroRecordType.prototype.read = function (stream, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var record, _a, _b, _i, key, _c, _d;
+ return tslib.__generator(this, function (_e) {
+ switch (_e.label) {
+ case 0:
+ record = {};
+ record["$schema"] = this._name;
+ _a = [];
+ for (_b in this._fields)
+ _a.push(_b);
+ _i = 0;
+ _e.label = 1;
+ case 1:
+ if (!(_i < _a.length)) return [3 /*break*/, 4];
+ key = _a[_i];
+ if (!this._fields.hasOwnProperty(key)) return [3 /*break*/, 3];
+ _c = record;
+ _d = key;
+ return [4 /*yield*/, this._fields[key].read(stream, options)];
+ case 2:
+ _c[_d] = _e.sent();
+ _e.label = 3;
+ case 3:
+ _i++;
+ return [3 /*break*/, 1];
+ case 4: return [2 /*return*/, record];
}
- this.pushedBytesLength += remaining;
- i += remaining;
- }
- }
- if (outBuffers.length > 1) {
- this.push(Buffer.concat(outBuffers));
- }
- else if (outBuffers.length === 1) {
- this.push(outBuffers[0]);
- }
+ });
+ });
};
- return BuffersStream;
-}(stream.Readable));
+ return AvroRecordType;
+}(AvroType));
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * maxBufferLength is max size of each buffer in the pooled buffers.
- */
-// Can't use import as Typescript doesn't recognize "buffer".
-var maxBufferLength = __webpack_require__(293).constants.MAX_LENGTH;
-/**
- * This class provides a buffer container which conceptually has no hard size limit.
- * It accepts a capacity, an array of input buffers and the total length of input data.
- * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers
- * into the internal "buffer" serially with respect to the total length.
- * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream
- * assembled from all the data in the internal "buffer".
- *
- * @export
- * @class BufferScheduler
- */
-var PooledBuffer = /** @class */ (function () {
- function PooledBuffer(capacity, buffers, totalLength) {
- /**
- * Internal buffers used to keep the data.
- * Each buffer has a length of the maxBufferLength except last one.
- *
- * @private
- * @type {Buffer[]}
- * @memberof PooledBuffer
- */
- this.buffers = [];
- this.capacity = capacity;
- this._size = 0;
- // allocate
- var bufferNum = Math.ceil(capacity / maxBufferLength);
- for (var i = 0; i < bufferNum; i++) {
- var len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;
- if (len === 0) {
- len = maxBufferLength;
- }
- this.buffers.push(Buffer.allocUnsafe(len));
- }
- if (buffers) {
- this.fill(buffers, totalLength);
- }
+// Copyright (c) Microsoft Corporation.
+var AvroReader = /** @class */ (function () {
+ function AvroReader(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) {
+ this._dataStream = dataStream;
+ this._headerStream = headerStream || dataStream;
+ this._initialized = false;
+ this._blockOffset = currentBlockOffset || 0;
+ this._objectIndex = indexWithinCurrentBlock || 0;
+ this._initialBlockOffset = currentBlockOffset || 0;
}
- Object.defineProperty(PooledBuffer.prototype, "size", {
- /**
- * The size of the data contained in the pooled buffers.
- */
+ Object.defineProperty(AvroReader.prototype, "blockOffset", {
get: function () {
- return this._size;
+ return this._blockOffset;
},
enumerable: false,
configurable: true
});
- /**
- * Fill the internal buffers with data in the input buffers serially
- * with respect to the total length and the total capacity of the internal buffers.
- * Data copied will be shift out of the input buffers.
- *
- * @param {Buffer[]} buffers Input buffers containing the data to be filled in the pooled buffer
- * @param {number} totalLength Total length of the data to be filled in.
- *
- * @returns {void}
- * @memberof PooledBuffer
- */
- PooledBuffer.prototype.fill = function (buffers, totalLength) {
- this._size = Math.min(this.capacity, totalLength);
- var i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0;
- while (totalCopiedNum < this._size) {
- var source = buffers[i];
- var target = this.buffers[j];
- var copiedNum = source.copy(target, targetOffset, sourceOffset);
- totalCopiedNum += copiedNum;
- sourceOffset += copiedNum;
- targetOffset += copiedNum;
- if (sourceOffset === source.length) {
- i++;
- sourceOffset = 0;
- }
- if (targetOffset === target.length) {
- j++;
- targetOffset = 0;
- }
- }
- // clear copied from source buffers
- buffers.splice(0, i);
- if (buffers.length > 0) {
- buffers[0] = buffers[0].slice(sourceOffset);
+ Object.defineProperty(AvroReader.prototype, "objectIndex", {
+ get: function () {
+ return this._objectIndex;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ AvroReader.prototype.initialize = function (options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var header, _a, codec, _b, schema, _c, i;
+ return tslib.__generator(this, function (_d) {
+ switch (_d.label) {
+ case 0: return [4 /*yield*/, AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, {
+ abortSignal: options.abortSignal
+ })];
+ case 1:
+ header = _d.sent();
+ if (!arraysEqual(header, AVRO_INIT_BYTES)) {
+ throw new Error("Stream is not an Avro file.");
+ }
+ // File metadata is written as if defined by the following map schema:
+ // { "type": "map", "values": "bytes"}
+ _a = this;
+ return [4 /*yield*/, AvroParser.readMap(this._headerStream, AvroParser.readString, {
+ abortSignal: options.abortSignal
+ })];
+ case 2:
+ // File metadata is written as if defined by the following map schema:
+ // { "type": "map", "values": "bytes"}
+ _a._metadata = _d.sent();
+ codec = this._metadata[AVRO_CODEC_KEY];
+ if (!(codec == undefined || codec == "null")) {
+ throw new Error("Codecs are not supported");
+ }
+ // The 16-byte, randomly-generated sync marker for this file.
+ _b = this;
+ return [4 /*yield*/, AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, {
+ abortSignal: options.abortSignal
+ })];
+ case 3:
+ // The 16-byte, randomly-generated sync marker for this file.
+ _b._syncMarker = _d.sent();
+ schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]);
+ this._itemType = AvroType.fromSchema(schema);
+ if (this._blockOffset == 0) {
+ this._blockOffset = this._initialBlockOffset + this._dataStream.position;
+ }
+ _c = this;
+ return [4 /*yield*/, AvroParser.readLong(this._dataStream, {
+ abortSignal: options.abortSignal
+ })];
+ case 4:
+ _c._itemsRemainingInBlock = _d.sent();
+ // skip block length
+ return [4 /*yield*/, AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })];
+ case 5:
+ // skip block length
+ _d.sent();
+ this._initialized = true;
+ if (!(this._objectIndex && this._objectIndex > 0)) return [3 /*break*/, 9];
+ i = 0;
+ _d.label = 6;
+ case 6:
+ if (!(i < this._objectIndex)) return [3 /*break*/, 9];
+ return [4 /*yield*/, this._itemType.read(this._dataStream, { abortSignal: options.abortSignal })];
+ case 7:
+ _d.sent();
+ this._itemsRemainingInBlock--;
+ _d.label = 8;
+ case 8:
+ i++;
+ return [3 /*break*/, 6];
+ case 9: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ AvroReader.prototype.hasNext = function () {
+ return !this._initialized || this._itemsRemainingInBlock > 0;
+ };
+ AvroReader.prototype.parseObjects = function (options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__asyncGenerator(this, arguments, function parseObjects_1() {
+ var result, marker, _a, err_1;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ if (!!this._initialized) return [3 /*break*/, 2];
+ return [4 /*yield*/, tslib.__await(this.initialize(options))];
+ case 1:
+ _b.sent();
+ _b.label = 2;
+ case 2:
+ if (!this.hasNext()) return [3 /*break*/, 13];
+ return [4 /*yield*/, tslib.__await(this._itemType.read(this._dataStream, {
+ abortSignal: options.abortSignal
+ }))];
+ case 3:
+ result = _b.sent();
+ this._itemsRemainingInBlock--;
+ this._objectIndex++;
+ if (!(this._itemsRemainingInBlock == 0)) return [3 /*break*/, 10];
+ return [4 /*yield*/, tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, {
+ abortSignal: options.abortSignal
+ }))];
+ case 4:
+ marker = _b.sent();
+ this._blockOffset = this._initialBlockOffset + this._dataStream.position;
+ this._objectIndex = 0;
+ if (!arraysEqual(this._syncMarker, marker)) {
+ throw new Error("Stream is not a valid Avro file.");
+ }
+ _b.label = 5;
+ case 5:
+ _b.trys.push([5, 7, , 8]);
+ _a = this;
+ return [4 /*yield*/, tslib.__await(AvroParser.readLong(this._dataStream, {
+ abortSignal: options.abortSignal
+ }))];
+ case 6:
+ _a._itemsRemainingInBlock = _b.sent();
+ return [3 /*break*/, 8];
+ case 7:
+ err_1 = _b.sent();
+ // We hit the end of the stream.
+ this._itemsRemainingInBlock = 0;
+ return [3 /*break*/, 8];
+ case 8:
+ if (!(this._itemsRemainingInBlock > 0)) return [3 /*break*/, 10];
+ // Ignore block size
+ return [4 /*yield*/, tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }))];
+ case 9:
+ // Ignore block size
+ _b.sent();
+ _b.label = 10;
+ case 10: return [4 /*yield*/, tslib.__await(result)];
+ case 11: return [4 /*yield*/, _b.sent()];
+ case 12:
+ _b.sent();
+ return [3 /*break*/, 2];
+ case 13: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ return AvroReader;
+}());
+
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+var AvroReadable = /** @class */ (function () {
+ function AvroReadable() {
+ }
+ return AvroReadable;
+}());
+
+// Copyright (c) Microsoft Corporation.
+var ABORT_ERROR$1 = new abortController.AbortError("Reading from the avro stream was aborted.");
+var AvroReadableFromStream = /** @class */ (function (_super) {
+ tslib.__extends(AvroReadableFromStream, _super);
+ function AvroReadableFromStream(readable) {
+ var _this = _super.call(this) || this;
+ _this._readable = readable;
+ _this._position = 0;
+ return _this;
+ }
+ AvroReadableFromStream.prototype.toUint8Array = function (data) {
+ if (typeof data === "string") {
+ return Buffer.from(data);
}
+ return data;
+ };
+ Object.defineProperty(AvroReadableFromStream.prototype, "position", {
+ get: function () {
+ return this._position;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ AvroReadableFromStream.prototype.read = function (size, options) {
+ var _a;
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var chunk;
+ var _this = this;
+ return tslib.__generator(this, function (_b) {
+ if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) {
+ throw ABORT_ERROR$1;
+ }
+ if (size < 0) {
+ throw new Error("size parameter should be positive: " + size);
+ }
+ if (size === 0) {
+ return [2 /*return*/, new Uint8Array()];
+ }
+ if (!this._readable.readable) {
+ throw new Error("Stream no longer readable.");
+ }
+ chunk = this._readable.read(size);
+ if (chunk) {
+ this._position += chunk.length;
+ // chunk.length maybe less than desired size if the stream ends.
+ return [2 /*return*/, this.toUint8Array(chunk)];
+ }
+ else {
+ // register callback to wait for enough data to read
+ return [2 /*return*/, new Promise(function (resolve, reject) {
+ var cleanUp = function () {
+ _this._readable.removeListener("readable", readableCallback);
+ _this._readable.removeListener("error", rejectCallback);
+ _this._readable.removeListener("end", rejectCallback);
+ _this._readable.removeListener("close", rejectCallback);
+ if (options.abortSignal) {
+ options.abortSignal.removeEventListener("abort", abortHandler);
+ }
+ };
+ var readableCallback = function () {
+ var chunk = _this._readable.read(size);
+ if (chunk) {
+ _this._position += chunk.length;
+ cleanUp();
+ // chunk.length maybe less than desired size if the stream ends.
+ resolve(_this.toUint8Array(chunk));
+ }
+ };
+ var rejectCallback = function () {
+ cleanUp();
+ reject();
+ };
+ var abortHandler = function () {
+ cleanUp();
+ reject(ABORT_ERROR$1);
+ };
+ _this._readable.on("readable", readableCallback);
+ _this._readable.once("error", rejectCallback);
+ _this._readable.once("end", rejectCallback);
+ _this._readable.once("close", rejectCallback);
+ if (options.abortSignal) {
+ options.abortSignal.addEventListener("abort", abortHandler);
+ }
+ })];
+ }
+ });
+ });
};
+ return AvroReadableFromStream;
+}(AvroReadable));
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query.
+ *
+ * @class BlobQuickQueryStream
+ * @extends {Readable}
+ */
+var BlobQuickQueryStream = /** @class */ (function (_super) {
+ tslib.__extends(BlobQuickQueryStream, _super);
/**
- * Get the readable stream assembled from all the data in the internal buffers.
+ * Creates an instance of BlobQuickQueryStream.
*
- * @returns {Readable}
- * @memberof PooledBuffer
+ * @param {NodeJS.ReadableStream} source The current ReadableStream returned from getter
+ * @param {BlobQuickQueryStreamOptions} [options={}]
+ * @memberof BlobQuickQueryStream
*/
- PooledBuffer.prototype.getReadableStream = function () {
- return new BuffersStream(this.buffers, this.size);
+ function BlobQuickQueryStream(source, options) {
+ if (options === void 0) { options = {}; }
+ var _this = _super.call(this) || this;
+ _this.source = source;
+ _this.onProgress = options.onProgress;
+ _this.onError = options.onError;
+ _this.avroReader = new AvroReader(new AvroReadableFromStream(_this.source));
+ _this.avroIter = _this.avroReader.parseObjects({ abortSignal: options.abortSignal });
+ return _this;
+ }
+ BlobQuickQueryStream.prototype._read = function () {
+ var _this = this;
+ this.readInternal().catch(function (err) {
+ _this.emit("error", err);
+ });
};
- return PooledBuffer;
-}());
+ BlobQuickQueryStream.prototype.readInternal = function () {
+ var e_1, _a;
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _b, _c, obj, schema, exit, data, bytesScanned, totalBytes, fatal, name_1, description, position, e_1_1;
+ return tslib.__generator(this, function (_d) {
+ switch (_d.label) {
+ case 0:
+ _d.trys.push([0, 5, 6, 11]);
+ _b = tslib.__asyncValues(this.avroIter);
+ _d.label = 1;
+ case 1: return [4 /*yield*/, _b.next()];
+ case 2:
+ if (!(_c = _d.sent(), !_c.done)) return [3 /*break*/, 4];
+ obj = _c.value;
+ schema = obj.$schema;
+ if (typeof schema !== "string") {
+ throw Error("Missing schema in avro record.");
+ }
+ exit = false;
+ switch (schema) {
+ case "com.microsoft.azure.storage.queryBlobContents.resultData":
+ data = obj.data;
+ if (data instanceof Uint8Array === false) {
+ throw Error("Invalid data in avro result record.");
+ }
+ if (!this.push(Buffer.from(data))) {
+ exit = true;
+ }
+ break;
+ case "com.microsoft.azure.storage.queryBlobContents.progress":
+ bytesScanned = obj.bytesScanned;
+ if (typeof bytesScanned !== "number") {
+ throw Error("Invalid bytesScanned in avro progress record.");
+ }
+ if (this.onProgress) {
+ this.onProgress({ loadedBytes: bytesScanned });
+ }
+ break;
+ case "com.microsoft.azure.storage.queryBlobContents.end":
+ if (this.onProgress) {
+ totalBytes = obj.totalBytes;
+ if (typeof totalBytes !== "number") {
+ throw Error("Invalid totalBytes in avro end record.");
+ }
+ this.onProgress({ loadedBytes: totalBytes });
+ }
+ this.push(null);
+ break;
+ case "com.microsoft.azure.storage.queryBlobContents.error":
+ if (this.onError) {
+ fatal = obj.fatal;
+ if (typeof fatal !== "boolean") {
+ throw Error("Invalid fatal in avro error record.");
+ }
+ name_1 = obj.name;
+ if (typeof name_1 !== "string") {
+ throw Error("Invalid name in avro error record.");
+ }
+ description = obj.description;
+ if (typeof description !== "string") {
+ throw Error("Invalid description in avro error record.");
+ }
+ position = obj.position;
+ if (typeof position !== "number") {
+ throw Error("Invalid position in avro error record.");
+ }
+ this.onError({
+ position: position,
+ name: name_1,
+ isFatal: fatal,
+ description: description
+ });
+ }
+ break;
+ default:
+ throw Error("Unknown schema " + schema + " in avro progress record.");
+ }
+ if (exit) {
+ return [3 /*break*/, 4];
+ }
+ _d.label = 3;
+ case 3: return [3 /*break*/, 1];
+ case 4: return [3 /*break*/, 11];
+ case 5:
+ e_1_1 = _d.sent();
+ e_1 = { error: e_1_1 };
+ return [3 /*break*/, 11];
+ case 6:
+ _d.trys.push([6, , 9, 10]);
+ if (!(_c && !_c.done && (_a = _b.return))) return [3 /*break*/, 8];
+ return [4 /*yield*/, _a.call(_b)];
+ case 7:
+ _d.sent();
+ _d.label = 8;
+ case 8: return [3 /*break*/, 10];
+ case 9:
+ if (e_1) throw e_1.error;
+ return [7 /*endfinally*/];
+ case 10: return [7 /*endfinally*/];
+ case 11: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ return BlobQuickQueryStream;
+}(stream.Readable));
// Copyright (c) Microsoft Corporation. All rights reserved.
/**
- * This class accepts a Node.js Readable stream as input, and keeps reading data
- * from the stream into the internal buffer structure, until it reaches maxBuffers.
- * Every available buffer will try to trigger outgoingHandler.
- *
- * The internal buffer structure includes an incoming buffer array, and a outgoing
- * buffer array. The incoming buffer array includes the "empty" buffers can be filled
- * with new incoming data. The outgoing array includes the filled buffers to be
- * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize.
- *
- * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
*
- * NUM_OF_ALL_BUFFERS <= maxBuffers
+ * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will
+ * parse avro data returned by blob query.
*
- * PERFORMANCE IMPROVEMENT TIPS:
- * 1. Input stream highWaterMark is better to set a same value with bufferSize
- * parameter, which will avoid Buffer.concat() operations.
- * 2. concurrency should set a smaller value than maxBuffers, which is helpful to
- * reduce the possibility when a outgoing handler waits for the stream data.
- * in this situation, outgoing handlers are blocked.
- * Outgoing queue shouldn't be empty.
* @export
- * @class BufferScheduler
+ * @class BlobQueryResponse
+ * @implements {BlobDownloadResponseModel}
*/
-var BufferScheduler = /** @class */ (function () {
+var BlobQueryResponse = /** @class */ (function () {
/**
- * Creates an instance of BufferScheduler.
+ * Creates an instance of BlobQueryResponse.
*
- * @param {Readable} readable A Node.js Readable stream
- * @param {number} bufferSize Buffer size of every maintained buffer
- * @param {number} maxBuffers How many buffers can be allocated
- * @param {OutgoingHandler} outgoingHandler An async function scheduled to be
- * triggered when a buffer fully filled
- * with stream data
- * @param {number} concurrency Concurrency of executing outgoingHandlers (>0)
- * @param {string} [encoding] [Optional] Encoding of Readable stream when it's a string stream
- * @memberof BufferScheduler
- */
- function BufferScheduler(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) {
+ * @param {BlobQueryResponseModel} originalResponse
+ * @param {BlobQuickQueryStreamOptions} [options={}]
+ * @memberof BlobQueryResponse
+ */
+ function BlobQueryResponse(originalResponse, options) {
+ if (options === void 0) { options = {}; }
+ this.originalResponse = originalResponse;
+ this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options);
+ }
+ Object.defineProperty(BlobQueryResponse.prototype, "acceptRanges", {
/**
- * An internal event emitter.
+ * Indicates that the service supports
+ * requests for partial file content.
*
- * @private
- * @type {EventEmitter}
- * @memberof BufferScheduler
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
*/
- this.emitter = new events.EventEmitter();
+ get: function () {
+ return this.originalResponse.acceptRanges;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "cacheControl", {
/**
- * An internal offset marker to track data offset in bytes of next outgoingHandler.
+ * Returns if it was previously specified
+ * for the file.
*
- * @private
- * @type {number}
- * @memberof BufferScheduler
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
*/
- this.offset = 0;
+ get: function () {
+ return this.originalResponse.cacheControl;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "contentDisposition", {
/**
- * An internal marker to track whether stream is end.
+ * Returns the value that was specified
+ * for the 'x-ms-content-disposition' header and specifies how to process the
+ * response.
*
- * @private
- * @type {boolean}
- * @memberof BufferScheduler
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
*/
- this.isStreamEnd = false;
+ get: function () {
+ return this.originalResponse.contentDisposition;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "contentEncoding", {
/**
- * An internal marker to track whether stream or outgoingHandler returns error.
+ * Returns the value that was specified
+ * for the Content-Encoding request header.
*
- * @private
- * @type {boolean}
- * @memberof BufferScheduler
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
*/
- this.isError = false;
+ get: function () {
+ return this.originalResponse.contentEncoding;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "contentLanguage", {
/**
- * How many handlers are executing.
+ * Returns the value that was specified
+ * for the Content-Language request header.
*
- * @private
- * @type {number}
- * @memberof BufferScheduler
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
*/
- this.executingOutgoingHandlers = 0;
+ get: function () {
+ return this.originalResponse.contentLanguage;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "blobSequenceNumber", {
/**
- * How many buffers have been allocated.
+ * The current sequence number for a
+ * page blob. This header is not returned for block blobs or append blobs.
*
- * @private
- * @type {number}
- * @memberof BufferScheduler
+ * @readonly
+ * @type {(number | undefined)}
+ * @memberof BlobQueryResponse
*/
- this.numBuffers = 0;
+ get: function () {
+ return this.originalResponse.blobSequenceNumber;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "blobType", {
/**
- * Because this class doesn't know how much data every time stream pops, which
- * is defined by highWaterMarker of the stream. So BufferScheduler will cache
- * data received from the stream, when data in unresolvedDataArray exceeds the
- * blockSize defined, it will try to concat a blockSize of buffer, fill into available
- * buffers from incoming and push to outgoing array.
+ * The blob's type. Possible values include:
+ * 'BlockBlob', 'PageBlob', 'AppendBlob'.
*
- * @private
- * @type {Buffer[]}
- * @memberof BufferScheduler
+ * @readonly
+ * @type {(BlobType | undefined)}
+ * @memberof BlobQueryResponse
*/
- this.unresolvedDataArray = [];
+ get: function () {
+ return this.originalResponse.blobType;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "contentLength", {
/**
- * How much data consisted in unresolvedDataArray.
+ * The number of bytes present in the
+ * response body.
*
- * @private
- * @type {number}
- * @memberof BufferScheduler
+ * @readonly
+ * @type {(number | undefined)}
+ * @memberof BlobQueryResponse
*/
- this.unresolvedLength = 0;
+ get: function () {
+ return this.originalResponse.contentLength;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "contentMD5", {
/**
- * The array includes all the available buffers can be used to fill data from stream.
+ * If the file has an MD5 hash and the
+ * request is to read the full file, this response header is returned so that
+ * the client can check for message content integrity. If the request is to
+ * read a specified range and the 'x-ms-range-get-content-md5' is set to
+ * true, then the request returns an MD5 hash for the range, as long as the
+ * range size is less than or equal to 4 MB. If neither of these sets of
+ * conditions is true, then no value is returned for the 'Content-MD5'
+ * header.
*
- * @private
- * @type {PooledBuffer[]}
- * @memberof BufferScheduler
+ * @readonly
+ * @type {(Uint8Array | undefined)}
+ * @memberof BlobQueryResponse
*/
- this.incoming = [];
+ get: function () {
+ return this.originalResponse.contentMD5;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "contentRange", {
/**
- * The array (queue) includes all the buffers filled from stream data.
+ * Indicates the range of bytes returned if
+ * the client requested a subset of the file by setting the Range request
+ * header.
*
- * @private
- * @type {PooledBuffer[]}
- * @memberof BufferScheduler
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
*/
- this.outgoing = [];
- if (bufferSize <= 0) {
- throw new RangeError("bufferSize must be larger than 0, current is " + bufferSize);
- }
- if (maxBuffers <= 0) {
- throw new RangeError("maxBuffers must be larger than 0, current is " + maxBuffers);
- }
- if (concurrency <= 0) {
- throw new RangeError("concurrency must be larger than 0, current is " + concurrency);
- }
- this.bufferSize = bufferSize;
- this.maxBuffers = maxBuffers;
- this.readable = readable;
- this.outgoingHandler = outgoingHandler;
- this.concurrency = concurrency;
- this.encoding = encoding;
- }
- /**
- * Start the scheduler, will return error when stream of any of the outgoingHandlers
- * returns error.
- *
- * @returns {Promise}
- * @memberof BufferScheduler
- */
- BufferScheduler.prototype.do = function () {
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _this = this;
- return tslib.__generator(this, function (_a) {
- return [2 /*return*/, new Promise(function (resolve, reject) {
- _this.readable.on("data", function (data) {
- data = typeof data === "string" ? Buffer.from(data, _this.encoding) : data;
- _this.appendUnresolvedData(data);
- if (!_this.resolveData()) {
- _this.readable.pause();
- }
- });
- _this.readable.on("error", function (err) {
- _this.emitter.emit("error", err);
- });
- _this.readable.on("end", function () {
- _this.isStreamEnd = true;
- _this.emitter.emit("checkEnd");
- });
- _this.emitter.on("error", function (err) {
- _this.isError = true;
- _this.readable.pause();
- reject(err);
- });
- _this.emitter.on("checkEnd", function () {
- if (_this.outgoing.length > 0) {
- _this.triggerOutgoingHandlers();
- return;
- }
- if (_this.isStreamEnd && _this.executingOutgoingHandlers === 0) {
- if (_this.unresolvedLength > 0 && _this.unresolvedLength < _this.bufferSize) {
- var buffer_1 = _this.shiftBufferFromUnresolvedDataArray();
- _this.outgoingHandler(function () { return buffer_1.getReadableStream(); }, buffer_1.size, _this.offset)
- .then(resolve)
- .catch(reject);
- }
- else if (_this.unresolvedLength >= _this.bufferSize) {
- return;
- }
- else {
- resolve();
- }
- }
- });
- })];
- });
- });
- };
- /**
- * Insert a new data into unresolved array.
- *
- * @private
- * @param {Buffer} data
- * @memberof BufferScheduler
- */
- BufferScheduler.prototype.appendUnresolvedData = function (data) {
- this.unresolvedDataArray.push(data);
- this.unresolvedLength += data.length;
- };
- /**
- * Try to shift a buffer with size in blockSize. The buffer returned may be less
- * than blockSize when data in unresolvedDataArray is less than bufferSize.
- *
- * @private
- * @returns {PooledBuffer}
- * @memberof BufferScheduler
- */
- BufferScheduler.prototype.shiftBufferFromUnresolvedDataArray = function (buffer) {
- if (!buffer) {
- buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength);
- }
- else {
- buffer.fill(this.unresolvedDataArray, this.unresolvedLength);
- }
- this.unresolvedLength -= buffer.size;
- return buffer;
- };
- /**
- * Resolve data in unresolvedDataArray. For every buffer with size in blockSize
- * shifted, it will try to get (or allocate a buffer) from incoming, and fill it,
- * then push it into outgoing to be handled by outgoing handler.
- *
- * Return false when available buffers in incoming are not enough, else true.
- *
- * @private
- * @returns {boolean} Return false when buffers in incoming are not enough, else true.
- * @memberof BufferScheduler
- */
- BufferScheduler.prototype.resolveData = function () {
- while (this.unresolvedLength >= this.bufferSize) {
- var buffer = void 0;
- if (this.incoming.length > 0) {
- buffer = this.incoming.shift();
- this.shiftBufferFromUnresolvedDataArray(buffer);
- }
- else {
- if (this.numBuffers < this.maxBuffers) {
- buffer = this.shiftBufferFromUnresolvedDataArray();
- this.numBuffers++;
- }
- else {
- // No available buffer, wait for buffer returned
- return false;
- }
- }
- this.outgoing.push(buffer);
- this.triggerOutgoingHandlers();
- }
- return true;
- };
- /**
- * Try to trigger a outgoing handler for every buffer in outgoing. Stop when
- * concurrency reaches.
- *
- * @private
- * @memberof BufferScheduler
- */
- BufferScheduler.prototype.triggerOutgoingHandlers = function () {
- return tslib.__awaiter(this, void 0, void 0, function () {
- var buffer;
- return tslib.__generator(this, function (_a) {
- do {
- if (this.executingOutgoingHandlers >= this.concurrency) {
- return [2 /*return*/];
- }
- buffer = this.outgoing.shift();
- if (buffer) {
- this.triggerOutgoingHandler(buffer);
- }
- } while (buffer);
- return [2 /*return*/];
- });
- });
- };
- /**
- * Trigger a outgoing handler for a buffer shifted from outgoing.
- *
- * @private
- * @param {Buffer} buffer
- * @returns {Promise}
- * @memberof BufferScheduler
- */
- BufferScheduler.prototype.triggerOutgoingHandler = function (buffer) {
- return tslib.__awaiter(this, void 0, void 0, function () {
- var bufferLength, err_1;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- bufferLength = buffer.size;
- this.executingOutgoingHandlers++;
- this.offset += bufferLength;
- _a.label = 1;
- case 1:
- _a.trys.push([1, 3, , 4]);
- return [4 /*yield*/, this.outgoingHandler(function () { return buffer.getReadableStream(); }, bufferLength, this.offset - bufferLength)];
- case 2:
- _a.sent();
- return [3 /*break*/, 4];
- case 3:
- err_1 = _a.sent();
- this.emitter.emit("error", err_1);
- return [2 /*return*/];
- case 4:
- this.executingOutgoingHandlers--;
- this.reuseBuffer(buffer);
- this.emitter.emit("checkEnd");
- return [2 /*return*/];
- }
- });
- });
- };
- /**
- * Return buffer used by outgoing handler into incoming.
- *
- * @private
- * @param {Buffer} buffer
- * @memberof BufferScheduler
- */
- BufferScheduler.prototype.reuseBuffer = function (buffer) {
- this.incoming.push(buffer);
- if (!this.isError && this.resolveData() && !this.isStreamEnd) {
- this.readable.resume();
- }
- };
- return BufferScheduler;
-}());
-
-// Copyright (c) Microsoft Corporation.
-/**
- * Creates a span using the global tracer.
- * @param name The name of the operation being performed.
- * @param tracingOptions The options for the underlying http request.
- */
-function createSpan(operationName, tracingOptions) {
- if (tracingOptions === void 0) { tracingOptions = {}; }
- var tracer = coreTracing.getTracer();
- var spanOptions = tslib.__assign(tslib.__assign({}, tracingOptions.spanOptions), { kind: api.SpanKind.INTERNAL });
- var span = tracer.startSpan("Azure.Storage.Blob." + operationName, spanOptions);
- span.setAttribute("az.namespace", "Microsoft.Storage");
- var newOptions = tracingOptions.spanOptions || {};
- if (span.isRecording()) {
- newOptions = tslib.__assign(tslib.__assign({}, tracingOptions.spanOptions), { parent: span.context(), attributes: tslib.__assign(tslib.__assign({}, spanOptions.attributes), { "az.namespace": "Microsoft.Storage" }) });
- }
- return {
- span: span,
- spanOptions: newOptions
- };
-}
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * Reads a readable stream into buffer. Fill the buffer from offset to end.
- *
- * @export
- * @param {NodeJS.ReadableStream} stream A Node.js Readable stream
- * @param {Buffer} buffer Buffer to be filled, length must >= offset
- * @param {number} offset From which position in the buffer to be filled, inclusive
- * @param {number} end To which position in the buffer to be filled, exclusive
- * @param {string} [encoding] Encoding of the Readable stream
- * @returns {Promise}
- */
-function streamToBuffer(stream, buffer, offset, end, encoding) {
- return tslib.__awaiter(this, void 0, void 0, function () {
- var pos, count;
- return tslib.__generator(this, function (_a) {
- pos = 0;
- count = end - offset;
- return [2 /*return*/, new Promise(function (resolve, reject) {
- stream.on("readable", function () {
- if (pos >= count) {
- resolve();
- return;
- }
- var chunk = stream.read();
- if (!chunk) {
- return;
- }
- if (typeof chunk === "string") {
- chunk = Buffer.from(chunk, encoding);
- }
- // How much data needed in this chunk
- var chunkLength = pos + chunk.length > count ? count - pos : chunk.length;
- buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength);
- pos += chunkLength;
- });
- stream.on("end", function () {
- if (pos < count) {
- reject(new Error("Stream drains before getting enough data needed. Data read: " + pos + ", data need: " + count));
- }
- resolve();
- });
- stream.on("error", reject);
- })];
- });
- });
-}
-/**
- * Reads a readable stream into buffer entirely.
- *
- * @export
- * @param {NodeJS.ReadableStream} stream A Node.js Readable stream
- * @param {Buffer} buffer Buffer to be filled, length must >= offset
- * @param {string} [encoding] Encoding of the Readable stream
- * @returns {Promise} with the count of bytes read.
- * @throws {RangeError} If buffer size is not big enough.
- */
-function streamToBuffer2(stream, buffer, encoding) {
- return tslib.__awaiter(this, void 0, void 0, function () {
- var pos, bufferSize;
- return tslib.__generator(this, function (_a) {
- pos = 0;
- bufferSize = buffer.length;
- return [2 /*return*/, new Promise(function (resolve, reject) {
- stream.on("readable", function () {
- var chunk = stream.read();
- if (!chunk) {
- return;
- }
- if (typeof chunk === "string") {
- chunk = Buffer.from(chunk, encoding);
- }
- if (pos + chunk.length > bufferSize) {
- reject(new Error("Stream exceeds buffer size. Buffer size: " + bufferSize));
- return;
- }
- buffer.fill(chunk, pos, pos + chunk.length);
- pos += chunk.length;
- });
- stream.on("end", function () {
- resolve(pos);
- });
- stream.on("error", reject);
- })];
- });
+ get: function () {
+ return this.originalResponse.contentRange;
+ },
+ enumerable: false,
+ configurable: true
});
-}
-/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- *
- * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed.
- *
- * @export
- * @param {NodeJS.ReadableStream} rs The read stream.
- * @param {string} file Destination file path.
- * @returns {Promise}
- */
-function readStreamToLocalFile(rs, file) {
- return tslib.__awaiter(this, void 0, void 0, function () {
- return tslib.__generator(this, function (_a) {
- return [2 /*return*/, new Promise(function (resolve, reject) {
- var ws = fs.createWriteStream(file);
- rs.on("error", function (err) {
- reject(err);
- });
- ws.on("error", function (err) {
- reject(err);
- });
- ws.on("close", resolve);
- rs.pipe(ws);
- })];
- });
+ Object.defineProperty(BlobQueryResponse.prototype, "contentType", {
+ /**
+ * The content type specified for the file.
+ * The default content type is 'application/octet-stream'
+ *
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.contentType;
+ },
+ enumerable: false,
+ configurable: true
});
-}
-/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- *
- * Promisified version of fs.stat().
- */
-var fsStat = util.promisify(fs.stat);
-var fsCreateReadStream = fs.createReadStream;
-
-/**
- * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob,
- * append blob, or page blob.
- *
- * @export
- * @class BlobClient
- */
-var BlobClient = /** @class */ (function (_super) {
- tslib.__extends(BlobClient, _super);
- function BlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {
- var _a;
- var _this = this;
- options = options || {};
- var pipeline;
- var url;
- if (credentialOrPipelineOrContainerName instanceof Pipeline) {
- // (url: string, pipeline: Pipeline)
- url = urlOrConnectionString;
- pipeline = credentialOrPipelineOrContainerName;
- }
- else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||
- credentialOrPipelineOrContainerName instanceof AnonymousCredential ||
- coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {
- // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
- url = urlOrConnectionString;
- options = blobNameOrOptions;
- pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
- }
- else if (!credentialOrPipelineOrContainerName &&
- typeof credentialOrPipelineOrContainerName !== "string") {
- // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
- // The second parameter is undefined. Use anonymous credential.
- url = urlOrConnectionString;
- pipeline = newPipeline(new AnonymousCredential(), options);
- }
- else if (credentialOrPipelineOrContainerName &&
- typeof credentialOrPipelineOrContainerName === "string" &&
- blobNameOrOptions &&
- typeof blobNameOrOptions === "string") {
- // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)
- var containerName = credentialOrPipelineOrContainerName;
- var blobName = blobNameOrOptions;
- var extractedCreds = extractConnectionStringParts(urlOrConnectionString);
- if (extractedCreds.kind === "AccountConnString") {
- {
- var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
- url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));
- options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);
- pipeline = newPipeline(sharedKeyCredential, options);
- }
- }
- else if (extractedCreds.kind === "SASConnString") {
- url =
- appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +
- "?" +
- extractedCreds.accountSas;
- pipeline = newPipeline(new AnonymousCredential(), options);
- }
- else {
- throw new Error("Connection string must be either an Account connection string or a SAS connection string");
- }
- }
- else {
- throw new Error("Expecting non-empty strings for containerName and blobName parameters");
- }
- _this = _super.call(this, url, pipeline) || this;
- (_a = _this.getBlobAndContainerNamesFromUrl(), _this._name = _a.blobName, _this._containerName = _a.containerName);
- _this.blobContext = new Blob$1(_this.storageClientContext);
- return _this;
- }
- Object.defineProperty(BlobClient.prototype, "name", {
+ Object.defineProperty(BlobQueryResponse.prototype, "copyCompletedOn", {
/**
- * The name of the blob.
+ * Conclusion time of the last attempted
+ * Copy File operation where this file was the destination file. This value
+ * can specify the time of a completed, aborted, or failed copy attempt.
+ *
+ * @readonly
+ * @type {(Date | undefined)}
+ * @memberof BlobQueryResponse
*/
get: function () {
- return this._name;
+ return undefined;
},
enumerable: false,
configurable: true
});
- Object.defineProperty(BlobClient.prototype, "containerName", {
+ Object.defineProperty(BlobQueryResponse.prototype, "copyId", {
/**
- * The name of the storage container the blob is associated with.
+ * String identifier for the last attempted Copy
+ * File operation where this file was the destination file.
+ *
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
*/
get: function () {
- return this._containerName;
+ return this.originalResponse.copyId;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "copyProgress", {
+ /**
+ * Contains the number of bytes copied and
+ * the total bytes in the source in the last attempted Copy File operation
+ * where this file was the destination file. Can show between 0 and
+ * Content-Length bytes copied.
+ *
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.copyProgress;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "copySource", {
+ /**
+ * URL up to 2KB in length that specifies the
+ * source file used in the last attempted Copy File operation where this file
+ * was the destination file.
+ *
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.copySource;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "copyStatus", {
+ /**
+ * State of the copy operation
+ * identified by 'x-ms-copy-id'. Possible values include: 'pending',
+ * 'success', 'aborted', 'failed'
+ *
+ * @readonly
+ * @type {(CopyStatusType | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.copyStatus;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "copyStatusDescription", {
+ /**
+ * Only appears when
+ * x-ms-copy-status is failed or pending. Describes cause of fatal or
+ * non-fatal copy operation failure.
+ *
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.copyStatusDescription;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "leaseDuration", {
+ /**
+ * When a blob is leased,
+ * specifies whether the lease is of infinite or fixed duration. Possible
+ * values include: 'infinite', 'fixed'.
+ *
+ * @readonly
+ * @type {(LeaseDurationType | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.leaseDuration;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "leaseState", {
+ /**
+ * Lease state of the blob. Possible
+ * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.
+ *
+ * @readonly
+ * @type {(LeaseStateType | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.leaseState;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "leaseStatus", {
+ /**
+ * The current lease status of the
+ * blob. Possible values include: 'locked', 'unlocked'.
+ *
+ * @readonly
+ * @type {(LeaseStatusType | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.leaseStatus;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "date", {
+ /**
+ * A UTC date/time value generated by the service that
+ * indicates the time at which the response was initiated.
+ *
+ * @readonly
+ * @type {(Date | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.date;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "blobCommittedBlockCount", {
+ /**
+ * The number of committed blocks
+ * present in the blob. This header is returned only for append blobs.
+ *
+ * @readonly
+ * @type {(number | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.blobCommittedBlockCount;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "etag", {
+ /**
+ * The ETag contains a value that you can use to
+ * perform operations conditionally, in quotes.
+ *
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.etag;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "errorCode", {
+ /**
+ * The error code.
+ *
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.errorCode;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "isServerEncrypted", {
+ /**
+ * The value of this header is set to
+ * true if the file data and application metadata are completely encrypted
+ * using the specified algorithm. Otherwise, the value is set to false (when
+ * the file is unencrypted, or if only parts of the file/application metadata
+ * are encrypted).
+ *
+ * @readonly
+ * @type {(boolean | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.isServerEncrypted;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "blobContentMD5", {
+ /**
+ * If the blob has a MD5 hash, and if
+ * request contains range header (Range or x-ms-range), this response header
+ * is returned with the value of the whole blob's MD5 value. This value may
+ * or may not be equal to the value returned in Content-MD5 header, with the
+ * latter calculated from the requested range.
+ *
+ * @readonly
+ * @type {(Uint8Array | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.blobContentMD5;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "lastModified", {
+ /**
+ * Returns the date and time the file was last
+ * modified. Any operation that modifies the file or its properties updates
+ * the last modified time.
+ *
+ * @readonly
+ * @type {(Date | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.lastModified;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "metadata", {
+ /**
+ * A name-value pair
+ * to associate with a file storage object.
+ *
+ * @readonly
+ * @type {(Metadata | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.metadata;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "requestId", {
+ /**
+ * This header uniquely identifies the request
+ * that was made and can be used for troubleshooting the request.
+ *
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.requestId;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "clientRequestId", {
+ /**
+ * If a client request id header is sent in the request, this header will be present in the
+ * response with the same value.
+ *
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.clientRequestId;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "version", {
+ /**
+ * Indicates the version of the File service used
+ * to execute the request.
+ *
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.version;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "encryptionKeySha256", {
+ /**
+ * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned
+ * when the blob was encrypted with a customer-provided key.
+ *
+ * @readonly
+ * @type {(string | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.encryptionKeySha256;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "contentCrc64", {
+ /**
+ * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to
+ * true, then the request returns a crc64 for the range, as long as the range size is less than
+ * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is
+ * specified in the same request, it will fail with 400(Bad Request)
+ *
+ * @type {(Uint8Array | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse.contentCrc64;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "blobBody", {
+ /**
+ * The response body as a browser Blob.
+ * Always undefined in node.js.
+ *
+ * @readonly
+ * @type {(Promise | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return undefined;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "readableStreamBody", {
+ /**
+ * The response body as a node.js Readable stream.
+ * Always undefined in the browser.
+ *
+ * It will parse avor data returned by blob query.
+ *
+ * @readonly
+ * @type {(NodeJS.ReadableStream | undefined)}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return coreHttp.isNode ? this.blobDownloadStream : undefined;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobQueryResponse.prototype, "_response", {
+ /**
+ * The HTTP response.
+ *
+ * @type {HttpResponse}
+ * @memberof BlobQueryResponse
+ */
+ get: function () {
+ return this.originalResponse._response;
},
enumerable: false,
configurable: true
});
+ return BlobQueryResponse;
+}());
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key.
+ *
+ * @export
+ * @class StorageSharedKeyCredentialPolicy
+ * @extends {CredentialPolicy}
+ */
+var StorageSharedKeyCredentialPolicy = /** @class */ (function (_super) {
+ tslib.__extends(StorageSharedKeyCredentialPolicy, _super);
/**
- * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp.
- * Provide "" will remove the snapshot and return a Client to the base blob.
- *
- * @param {string} snapshot The snapshot timestamp.
- * @returns {BlobClient} A new BlobClient object identical to the source but with the specified snapshot timestamp
- * @memberof BlobClient
- */
- BlobClient.prototype.withSnapshot = function (snapshot) {
- return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);
- };
- /**
- * Creates a new BlobClient object pointing to a version of this blob.
- * Provide "" will remove the versionId and return a Client to the base blob.
- *
- * @param {string} versionId The versionId.
- * @returns {BlobClient} A new BlobClient object pointing to the version of this blob.
- * @memberof BlobClient
+ * Creates an instance of StorageSharedKeyCredentialPolicy.
+ * @param {RequestPolicy} nextPolicy
+ * @param {RequestPolicyOptions} options
+ * @param {StorageSharedKeyCredential} factory
+ * @memberof StorageSharedKeyCredentialPolicy
*/
- BlobClient.prototype.withVersion = function (versionId) {
- return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline);
- };
+ function StorageSharedKeyCredentialPolicy(nextPolicy, options, factory) {
+ var _this = _super.call(this, nextPolicy, options) || this;
+ _this.factory = factory;
+ return _this;
+ }
/**
- * Creates a AppendBlobClient object.
+ * Signs request.
*
- * @returns {AppendBlobClient}
- * @memberof BlobClient
+ * @protected
+ * @param {WebResource} request
+ * @returns {WebResource}
+ * @memberof StorageSharedKeyCredentialPolicy
*/
- BlobClient.prototype.getAppendBlobClient = function () {
- return new AppendBlobClient(this.url, this.pipeline);
+ StorageSharedKeyCredentialPolicy.prototype.signRequest = function (request) {
+ request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString());
+ if (request.body && typeof request.body === "string" && request.body.length > 0) {
+ request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body));
+ }
+ var stringToSign = [
+ request.method.toUpperCase(),
+ this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE),
+ this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING),
+ this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH),
+ this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5),
+ this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE),
+ this.getHeaderValueToSign(request, HeaderConstants.DATE),
+ this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE),
+ this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH),
+ this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH),
+ this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE),
+ this.getHeaderValueToSign(request, HeaderConstants.RANGE)
+ ].join("\n") +
+ "\n" +
+ this.getCanonicalizedHeadersString(request) +
+ this.getCanonicalizedResourceString(request);
+ var signature = this.factory.computeHMACSHA256(stringToSign);
+ request.headers.set(HeaderConstants.AUTHORIZATION, "SharedKey " + this.factory.accountName + ":" + signature);
+ // console.log(`[URL]:${request.url}`);
+ // console.log(`[HEADERS]:${request.headers.toString()}`);
+ // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`);
+ // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);
+ return request;
};
/**
- * Creates a BlockBlobClient object.
+ * Retrieve header value according to shared key sign rules.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key
*
- * @returns {BlockBlobClient}
- * @memberof BlobClient
+ * @private
+ * @param {WebResource} request
+ * @param {string} headerName
+ * @returns {string}
+ * @memberof StorageSharedKeyCredentialPolicy
*/
- BlobClient.prototype.getBlockBlobClient = function () {
- return new BlockBlobClient(this.url, this.pipeline);
+ StorageSharedKeyCredentialPolicy.prototype.getHeaderValueToSign = function (request, headerName) {
+ var value = request.headers.get(headerName);
+ if (!value) {
+ return "";
+ }
+ // When using version 2015-02-21 or later, if Content-Length is zero, then
+ // set the Content-Length part of the StringToSign to an empty string.
+ // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key
+ if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") {
+ return "";
+ }
+ return value;
};
/**
- * Creates a PageBlobClient object.
+ * To construct the CanonicalizedHeaders portion of the signature string, follow these steps:
+ * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header.
+ * 2. Convert each HTTP header name to lowercase.
+ * 3. Sort the headers lexicographically by header name, in ascending order.
+ * Each header may appear only once in the string.
+ * 4. Replace any linear whitespace in the header value with a single space.
+ * 5. Trim any whitespace around the colon in the header.
+ * 6. Finally, append a new-line character to each canonicalized header in the resulting list.
+ * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string.
*
- * @returns {PageBlobClient}
- * @memberof BlobClient
+ * @private
+ * @param {WebResource} request
+ * @returns {string}
+ * @memberof StorageSharedKeyCredentialPolicy
*/
- BlobClient.prototype.getPageBlobClient = function () {
- return new PageBlobClient(this.url, this.pipeline);
+ StorageSharedKeyCredentialPolicy.prototype.getCanonicalizedHeadersString = function (request) {
+ var headersArray = request.headers.headersArray().filter(function (value) {
+ return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE);
+ });
+ headersArray.sort(function (a, b) {
+ return a.name.toLowerCase().localeCompare(b.name.toLowerCase());
+ });
+ // Remove duplicate headers
+ headersArray = headersArray.filter(function (value, index, array) {
+ if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) {
+ return false;
+ }
+ return true;
+ });
+ var canonicalizedHeadersStringToSign = "";
+ headersArray.forEach(function (header) {
+ canonicalizedHeadersStringToSign += header.name
+ .toLowerCase()
+ .trimRight() + ":" + header.value.trimLeft() + "\n";
+ });
+ return canonicalizedHeadersStringToSign;
};
/**
- * Reads or downloads a blob from the system, including its metadata and properties.
- * You can also call Get Blob to read a snapshot.
- *
- * * In Node.js, data returns in a Readable stream readableStreamBody
- * * In browsers, data returns in a promise blobBody
- *
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob
- *
- * @param {number} [offset] From which position of the blob to download, >= 0
- * @param {number} [count] How much data to be downloaded, > 0. Will download to the end when undefined
- * @param {BlobDownloadOptions} [options] Optional options to Blob Download operation.
- * @returns {Promise}
- * @memberof BlobClient
- *
- * Example usage (Node.js):
- *
- * ```js
- * // Download and convert a blob to a string
- * const downloadBlockBlobResponse = await blobClient.download();
- * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);
- * console.log("Downloaded blob content:", downloaded.toString());
- *
- * async function streamToBuffer(readableStream) {
- * return new Promise((resolve, reject) => {
- * const chunks = [];
- * readableStream.on("data", (data) => {
- * chunks.push(data instanceof Buffer ? data : Buffer.from(data));
- * });
- * readableStream.on("end", () => {
- * resolve(Buffer.concat(chunks));
- * });
- * readableStream.on("error", reject);
- * });
- * }
- * ```
- *
- * Example usage (browser):
- *
- * ```js
- * // Download and convert a blob to a string
- * const downloadBlockBlobResponse = await blobClient.download();
- * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);
- * console.log(
- * "Downloaded blob content",
- * downloaded
- * );
+ * Retrieves the webResource canonicalized resource string.
*
- * async function blobToString(blob: Blob): Promise {
- * const fileReader = new FileReader();
- * return new Promise((resolve, reject) => {
- * fileReader.onloadend = (ev: any) => {
- * resolve(ev.target!.result);
- * };
- * fileReader.onerror = reject;
- * fileReader.readAsText(blob);
- * });
- * }
- * ```
+ * @private
+ * @param {WebResource} request
+ * @returns {string}
+ * @memberof StorageSharedKeyCredentialPolicy
*/
- BlobClient.prototype.download = function (offset, count, options) {
- var _a;
- if (offset === void 0) { offset = 0; }
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, res_1, wrappedRes, e_1;
- var _this = this;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
- case 0:
- options.conditions = options.conditions || {};
- options.conditions = options.conditions || {};
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- _b = createSpan("BlobClient-download", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- _c.label = 1;
- case 1:
- _c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.blobContext.download({
- abortSignal: options.abortSignal,
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress,
- range: offset === 0 && !count ? undefined : rangeToString({ offset: offset, count: count }),
- rangeGetContentMD5: options.rangeGetContentMD5,
- rangeGetContentCRC64: options.rangeGetContentCrc64,
- snapshot: options.snapshot,
- cpkInfo: options.customerProvidedKey,
- spanOptions: spanOptions
- })];
- case 2:
- res_1 = _c.sent();
- wrappedRes = tslib.__assign(tslib.__assign({}, res_1), { _response: res_1._response, objectReplicationDestinationPolicyId: res_1.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res_1.objectReplicationRules) });
- // We support retrying when download stream unexpected ends in Node.js runtime
- // Following code shouldn't be bundled into browser build, however some
- // bundlers may try to bundle following code and "FileReadResponse.ts".
- // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts"
- // The config is in package.json "browser" field
- if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) {
- // TODO: Default value or make it a required parameter?
- options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS;
- }
- if (res_1.contentLength === undefined) {
- throw new RangeError("File download response doesn't contain valid content length header");
- }
- if (!res_1.etag) {
- throw new RangeError("File download response doesn't contain valid etag header");
- }
- return [2 /*return*/, new BlobDownloadResponse(wrappedRes, function (start) { return tslib.__awaiter(_this, void 0, void 0, function () {
- var updatedOptions;
- var _a;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
- case 0:
- updatedOptions = {
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: {
- ifMatch: options.conditions.ifMatch || res_1.etag,
- ifModifiedSince: options.conditions.ifModifiedSince,
- ifNoneMatch: options.conditions.ifNoneMatch,
- ifUnmodifiedSince: options.conditions.ifUnmodifiedSince,
- ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions
- },
- range: rangeToString({
- count: offset + res_1.contentLength - start,
- offset: start
- }),
- rangeGetContentMD5: options.rangeGetContentMD5,
- rangeGetContentCRC64: options.rangeGetContentCrc64,
- snapshot: options.snapshot,
- cpkInfo: options.customerProvidedKey
- };
- return [4 /*yield*/, this.blobContext.download(tslib.__assign({ abortSignal: options.abortSignal }, updatedOptions))];
- case 1:
- // Debug purpose only
- // console.log(
- // `Read from internal stream, range: ${
- // updatedOptions.range
- // }, options: ${JSON.stringify(updatedOptions)}`
- // );
- return [2 /*return*/, (_b.sent()).readableStreamBody];
- }
- });
- }); }, offset, res_1.contentLength, {
- abortSignal: options.abortSignal,
- maxRetryRequests: options.maxRetryRequests,
- onProgress: options.onProgress
- })];
- case 3:
- e_1 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_1.message
- });
- throw e_1;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
+ StorageSharedKeyCredentialPolicy.prototype.getCanonicalizedResourceString = function (request) {
+ var path = getURLPath(request.url) || "/";
+ var canonicalizedResourceString = "";
+ canonicalizedResourceString += "/" + this.factory.accountName + path;
+ var queries = getURLQueries(request.url);
+ var lowercaseQueries = {};
+ if (queries) {
+ var queryKeys = [];
+ for (var key in queries) {
+ if (queries.hasOwnProperty(key)) {
+ var lowercaseKey = key.toLowerCase();
+ lowercaseQueries[lowercaseKey] = queries[key];
+ queryKeys.push(lowercaseKey);
}
- });
- });
+ }
+ queryKeys.sort();
+ for (var _i = 0, queryKeys_1 = queryKeys; _i < queryKeys_1.length; _i++) {
+ var key = queryKeys_1[_i];
+ canonicalizedResourceString += "\n" + key + ":" + decodeURIComponent(lowercaseQueries[key]);
+ }
+ }
+ return canonicalizedResourceString;
};
+ return StorageSharedKeyCredentialPolicy;
+}(CredentialPolicy));
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * StorageSharedKeyCredential for account key authorization of Azure Storage service.
+ *
+ * @export
+ * @class StorageSharedKeyCredential
+ * @extends {Credential}
+ */
+var StorageSharedKeyCredential = /** @class */ (function (_super) {
+ tslib.__extends(StorageSharedKeyCredential, _super);
/**
- * Returns true if the Azure blob resource represented by this client exists; false otherwise.
- *
- * NOTE: use this function with care since an existing blob might be deleted by other clients or
- * applications. Vice versa new blobs might be added by other clients or applications after this
- * function completes.
- *
- * @param {BlobExistsOptions} [options] options to Exists operation.
- * @returns {Promise}
- * @memberof BlobClient
+ * Creates an instance of StorageSharedKeyCredential.
+ * @param {string} accountName
+ * @param {string} accountKey
+ * @memberof StorageSharedKeyCredential
*/
- BlobClient.prototype.exists = function (options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_2;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
- case 0:
- _a = createSpan("BlobClient-exists", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
- case 1:
- _b.trys.push([1, 3, 4, 5]);
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.getProperties({
- abortSignal: options.abortSignal,
- customerProvidedKey: options.customerProvidedKey,
- conditions: options.conditions,
- tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })
- })];
- case 2:
- _b.sent();
- return [2 /*return*/, true];
- case 3:
- e_2 = _b.sent();
- if (e_2.statusCode === 404) {
- span.setStatus({
- code: api.CanonicalCode.NOT_FOUND,
- message: "Expected exception when checking blob existence"
- });
- return [2 /*return*/, false];
- }
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_2.message
- });
- throw e_2;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
- };
+ function StorageSharedKeyCredential(accountName, accountKey) {
+ var _this = _super.call(this) || this;
+ _this.accountName = accountName;
+ _this.accountKey = Buffer.from(accountKey, "base64");
+ return _this;
+ }
/**
- * Returns all user-defined metadata, standard HTTP properties, and system properties
- * for the blob. It does not return the content of the blob.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties
- *
- * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if
- * they originally contained uppercase characters. This differs from the metadata keys returned by
- * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which
- * will retain their original casing.
+ * Creates a StorageSharedKeyCredentialPolicy object.
*
- * @param {BlobGetPropertiesOptions} [options] Optional options to Get Properties operation.
- * @returns {Promise}
- * @memberof BlobClient
+ * @param {RequestPolicy} nextPolicy
+ * @param {RequestPolicyOptions} options
+ * @returns {StorageSharedKeyCredentialPolicy}
+ * @memberof StorageSharedKeyCredential
*/
- BlobClient.prototype.getProperties = function (options) {
- var _a;
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, res, e_3;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
- case 0:
- _b = createSpan("BlobClient-getProperties", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- _c.label = 1;
- case 1:
- _c.trys.push([1, 3, 4, 5]);
- options.conditions = options.conditions || {};
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.blobContext.getProperties({
- abortSignal: options.abortSignal,
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- cpkInfo: options.customerProvidedKey,
- spanOptions: spanOptions
- })];
- case 2:
- res = _c.sent();
- return [2 /*return*/, tslib.__assign(tslib.__assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) })];
- case 3:
- e_3 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_3.message
- });
- throw e_3;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
+ StorageSharedKeyCredential.prototype.create = function (nextPolicy, options) {
+ return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this);
};
/**
- * Marks the specified blob or snapshot for deletion. The blob is later deleted
- * during garbage collection. Note that in order to delete a blob, you must delete
- * all of its snapshots. You can delete both at the same time with the Delete
- * Blob operation.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+ * Generates a hash signature for an HTTP request or for a SAS.
*
- * @param {BlobDeleteOptions} [options] Optional options to Blob Delete operation.
- * @returns {Promise}
- * @memberof BlobClient
+ * @param {string} stringToSign
+ * @returns {string}
+ * @memberof StorageSharedKeyCredential
*/
- BlobClient.prototype.delete = function (options) {
- var _a;
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_4;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
- case 0:
- _b = createSpan("BlobClient-delete", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- options.conditions = options.conditions || {};
- _c.label = 1;
- case 1:
- _c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.blobContext.deleteMethod({
- abortSignal: options.abortSignal,
- deleteSnapshots: options.deleteSnapshots,
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _c.sent()];
- case 3:
- e_4 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_4.message
- });
- throw e_4;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
+ StorageSharedKeyCredential.prototype.computeHMACSHA256 = function (stringToSign) {
+ return crypto.createHmac("sha256", this.accountKey)
+ .update(stringToSign, "utf8")
+ .digest("base64");
};
+ return StorageSharedKeyCredential;
+}(Credential));
+
+/*
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for
+ * license information.
+ *
+ * Code generated by Microsoft (R) AutoRest Code Generator.
+ * Changes may cause incorrect behavior and will be lost if the code is
+ * regenerated.
+ */
+var packageName = "azure-storage-blob";
+var packageVersion = "12.2.1";
+var StorageClientContext = /** @class */ (function (_super) {
+ tslib.__extends(StorageClientContext, _super);
/**
- * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted
- * during garbage collection. Note that in order to delete a blob, you must delete
- * all of its snapshots. You can delete both at the same time with the Delete
- * Blob operation.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
- *
- * @param {BlobDeleteOptions} [options] Optional options to Blob Delete operation.
- * @returns {Promise}
- * @memberof BlobClient
+ * Initializes a new instance of the StorageClientContext class.
+ * @param url The URL of the service account, container, or blob that is the targe of the desired
+ * operation.
+ * @param [options] The parameter options
*/
- BlobClient.prototype.deleteIfExists = function (options) {
- var _a, _b;
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _c, span, spanOptions, res, e_5;
- return tslib.__generator(this, function (_d) {
- switch (_d.label) {
- case 0:
- _c = createSpan("BlobClient-deleteIfExists", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;
- _d.label = 1;
- case 1:
- _d.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 2:
- res = _d.sent();
- return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable
- })];
- case 3:
- e_5 = _d.sent();
- if (((_a = e_5.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") {
- span.setStatus({
- code: api.CanonicalCode.NOT_FOUND,
- message: "Expected exception when deleting a blob or snapshot only if it exists."
- });
- return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_5.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_5.response })];
- }
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_5.message
- });
- throw e_5;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
- };
+ function StorageClientContext(url, options) {
+ var _this = this;
+ if (url == undefined) {
+ throw new Error("'url' cannot be null.");
+ }
+ if (!options) {
+ options = {};
+ }
+ if (!options.userAgent) {
+ var defaultUserAgent = coreHttp.getDefaultUserAgentValue();
+ options.userAgent = packageName + "/" + packageVersion + " " + defaultUserAgent;
+ }
+ _this = _super.call(this, undefined, options) || this;
+ _this.version = "2019-12-12";
+ _this.baseUri = "{url}";
+ _this.requestContentType = "application/json; charset=utf-8";
+ _this.url = url;
+ return _this;
+ }
+ return StorageClientContext;
+}(coreHttp.ServiceClient));
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+(function (BlockBlobTier) {
/**
- * Restores the contents and metadata of soft deleted blob and any associated
- * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29
- * or later.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob
- *
- * @param {BlobUndeleteOptions} [options] Optional options to Blob Undelete operation.
- * @returns {Promise}
- * @memberof BlobClient
+ * Optimized for storing data that is accessed frequently.
*/
- BlobClient.prototype.undelete = function (options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_6;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
- case 0:
- _a = createSpan("BlobClient-undelete", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
- case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.blobContext.undelete({
- abortSignal: options.abortSignal,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _b.sent()];
- case 3:
- e_6 = _b.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_6.message
- });
- throw e_6;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
- };
+ BlockBlobTier["Hot"] = "Hot";
/**
- * Sets system properties on the blob.
- *
- * If no value provided, or no value provided for the specified blob HTTP headers,
- * these blob HTTP headers without a value will be cleared.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties
- *
- * @param {BlobHTTPHeaders} [blobHTTPHeaders] If no value provided, or no value provided for
- * the specified blob HTTP headers, these blob HTTP
- * headers without a value will be cleared.
- * @param {BlobSetHTTPHeadersOptions} [options] Optional options to Blob Set HTTP Headers operation.
- * @returns {Promise}
- * @memberof BlobClient
+ * Optimized for storing data that is infrequently accessed and stored for at least 30 days.
*/
- BlobClient.prototype.setHTTPHeaders = function (blobHTTPHeaders, options) {
- var _a;
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_7;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
- case 0:
- _b = createSpan("BlobClient-setHTTPHeaders", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- options.conditions = options.conditions || {};
- _c.label = 1;
- case 1:
- _c.trys.push([1, 3, 4, 5]);
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.blobContext.setHTTPHeaders({
- abortSignal: options.abortSignal,
- blobHTTPHeaders: blobHTTPHeaders,
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- cpkInfo: options.customerProvidedKey,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _c.sent()];
- case 3:
- e_7 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_7.message
- });
- throw e_7;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
- };
+ BlockBlobTier["Cool"] = "Cool";
/**
- * Sets user-defined metadata for the specified blob as one or more name-value pairs.
- *
- * If no option provided, or no metadata defined in the parameter, the blob
- * metadata will be removed.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata
- *
- * @param {Metadata} [metadata] Replace existing metadata with this value.
- * If no value provided the existing metadata will be removed.
- * @param {BlobSetMetadataOptions} [options] Optional options to Set Metadata operation.
- * @returns {Promise}
- * @memberof BlobClient
+ * Optimized for storing data that is rarely accessed and stored for at least 180 days
+ * with flexible latency requirements (on the order of hours).
*/
- BlobClient.prototype.setMetadata = function (metadata, options) {
- var _a;
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_8;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
- case 0:
- _b = createSpan("BlobClient-setMetadata", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- options.conditions = options.conditions || {};
- _c.label = 1;
- case 1:
- _c.trys.push([1, 3, 4, 5]);
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.blobContext.setMetadata({
- abortSignal: options.abortSignal,
- leaseAccessConditions: options.conditions,
- metadata: metadata,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- cpkInfo: options.customerProvidedKey,
- encryptionScope: options.encryptionScope,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _c.sent()];
- case 3:
- e_8 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_8.message
- });
- throw e_8;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
- };
+ BlockBlobTier["Archive"] = "Archive";
+})(exports.BlockBlobTier || (exports.BlockBlobTier = {}));
+(function (PremiumPageBlobTier) {
/**
- * Sets tags on the underlying blob.
- * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.
- * Valid tag key and value characters include lower and upper case letters, digits (0-9),
- * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_').
- *
- * @param {Tags} tags
- * @param {BlobSetTagsOptions} [options={}]
- * @returns {Promise}
- * @memberof BlobClient
+ * P4 Tier.
*/
- BlobClient.prototype.setTags = function (tags, options) {
- var _a;
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_9;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
- case 0:
- _b = createSpan("BlobClient-setTags", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- _c.label = 1;
- case 1:
- _c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.blobContext.setTags({
- abortSignal: options.abortSignal,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- spanOptions: spanOptions,
- tags: toBlobTags(tags)
- })];
- case 2: return [2 /*return*/, _c.sent()];
- case 3:
- e_9 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_9.message
- });
- throw e_9;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
- };
+ PremiumPageBlobTier["P4"] = "P4";
/**
- * Gets the tags associated with the underlying blob.
- *
- * @param {BlobGetTagsOptions} [options={}]
- * @returns {Promise}
- * @memberof BlobClient
+ * P6 Tier.
*/
- BlobClient.prototype.getTags = function (options) {
- var _a;
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, response, wrappedResponse, e_10;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
- case 0:
- _b = createSpan("BlobClient-getTags", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- _c.label = 1;
- case 1:
- _c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.blobContext.getTags({
- abortSignal: options.abortSignal,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- spanOptions: spanOptions
- })];
- case 2:
- response = _c.sent();
- wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} });
- return [2 /*return*/, wrappedResponse];
- case 3:
- e_10 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_10.message
- });
- throw e_10;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
- };
+ PremiumPageBlobTier["P6"] = "P6";
/**
- * Get a {@link BlobLeaseClient} that manages leases on the blob.
- *
- * @param {string} [proposeLeaseId] Initial proposed lease Id.
- * @returns {BlobLeaseClient} A new BlobLeaseClient object for managing leases on the blob.
- * @memberof BlobClient
+ * P10 Tier.
*/
- BlobClient.prototype.getBlobLeaseClient = function (proposeLeaseId) {
- return new BlobLeaseClient(this, proposeLeaseId);
- };
+ PremiumPageBlobTier["P10"] = "P10";
/**
- * Creates a read-only snapshot of a blob.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob
- *
- * @param {BlobCreateSnapshotOptions} [options] Optional options to the Blob Create Snapshot operation.
- * @returns {Promise}
- * @memberof BlobClient
+ * P15 Tier.
*/
- BlobClient.prototype.createSnapshot = function (options) {
- var _a;
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_11;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
- case 0:
- _b = createSpan("BlobClient-createSnapshot", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- options.conditions = options.conditions || {};
- _c.label = 1;
- case 1:
- _c.trys.push([1, 3, 4, 5]);
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.blobContext.createSnapshot({
- abortSignal: options.abortSignal,
- leaseAccessConditions: options.conditions,
- metadata: options.metadata,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- cpkInfo: options.customerProvidedKey,
- encryptionScope: options.encryptionScope,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _c.sent()];
- case 3:
- e_11 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_11.message
- });
- throw e_11;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
- };
+ PremiumPageBlobTier["P15"] = "P15";
/**
- * Asynchronously copies a blob to a destination within the storage account.
- * This method returns a long running operation poller that allows you to wait
- * indefinitely until the copy is completed.
- * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller.
- * Note that the onProgress callback will not be invoked if the operation completes in the first
- * request, and attempting to cancel a completed copy will result in an error being thrown.
- *
- * In version 2012-02-12 and later, the source for a Copy Blob operation can be
- * a committed blob in any Azure storage account.
- * Beginning with version 2015-02-21, the source for a Copy Blob operation can be
- * an Azure file in any Azure storage account.
- * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob
- * operation to copy from another storage account.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob
- *
- * Example using automatic polling:
- *
- * ```js
- * const copyPoller = await blobClient.beginCopyFromURL('url');
- * const result = await copyPoller.pollUntilDone();
- * ```
- *
- * Example using manual polling:
- *
- * ```js
- * const copyPoller = await blobClient.beginCopyFromURL('url');
- * while (!poller.isDone()) {
- * await poller.poll();
- * }
- * const result = copyPoller.getResult();
- * ```
- *
- * Example using progress updates:
- *
- * ```js
- * const copyPoller = await blobClient.beginCopyFromURL('url', {
- * onProgress(state) {
- * console.log(`Progress: ${state.copyProgress}`);
- * }
- * });
- * const result = await copyPoller.pollUntilDone();
- * ```
- *
- * Example using a changing polling interval (default 15 seconds):
- *
- * ```js
- * const copyPoller = await blobClient.beginCopyFromURL('url', {
- * intervalInMs: 1000 // poll blob every 1 second for copy progress
- * });
- * const result = await copyPoller.pollUntilDone();
- * ```
- *
- * Example using copy cancellation:
- *
- * ```js
- * const copyPoller = await blobClient.beginCopyFromURL('url');
- * // cancel operation after starting it.
- * try {
- * await copyPoller.cancelOperation();
- * // calls to get the result now throw PollerCancelledError
- * await copyPoller.getResult();
- * } catch (err) {
- * if (err.name === 'PollerCancelledError') {
- * console.log('The copy was cancelled.');
- * }
- * }
- * ```
- *
- * @param {string} copySource url to the source Azure Blob/File.
- * @param {BlobBeginCopyFromURLOptions} [options] Optional options to the Blob Start Copy From URL operation.
+ * P20 Tier.
*/
- BlobClient.prototype.beginCopyFromURL = function (copySource, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var client, poller;
- var _this = this;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- client = {
- abortCopyFromURL: function () {
- var args = [];
- for (var _i = 0; _i < arguments.length; _i++) {
- args[_i] = arguments[_i];
- }
- return _this.abortCopyFromURL.apply(_this, args);
- },
- getProperties: function () {
- var args = [];
- for (var _i = 0; _i < arguments.length; _i++) {
- args[_i] = arguments[_i];
- }
- return _this.getProperties.apply(_this, args);
- },
- startCopyFromURL: function () {
- var args = [];
- for (var _i = 0; _i < arguments.length; _i++) {
- args[_i] = arguments[_i];
- }
- return _this.startCopyFromURL.apply(_this, args);
- }
- };
- poller = new BlobBeginCopyFromUrlPoller({
- blobClient: client,
- copySource: copySource,
- intervalInMs: options.intervalInMs,
- onProgress: options.onProgress,
- resumeFrom: options.resumeFrom,
- startCopyFromURLOptions: options
- });
- // Trigger the startCopyFromURL call by calling poll.
- // Any errors from this method should be surfaced to the user.
- return [4 /*yield*/, poller.poll()];
- case 1:
- // Trigger the startCopyFromURL call by calling poll.
- // Any errors from this method should be surfaced to the user.
- _a.sent();
- return [2 /*return*/, poller];
- }
- });
- });
- };
+ PremiumPageBlobTier["P20"] = "P20";
/**
- * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero
- * length and full metadata. Version 2012-02-12 and newer.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob
- *
- * @param {string} copyId Id of the Copy From URL operation.
- * @param {BlobAbortCopyFromURLOptions} [options] Optional options to the Blob Abort Copy From URL operation.
- * @returns {Promise}
- * @memberof BlobClient
+ * P30 Tier.
*/
- BlobClient.prototype.abortCopyFromURL = function (copyId, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_12;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
- case 0:
- _a = createSpan("BlobClient-abortCopyFromURL", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
- case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.blobContext.abortCopyFromURL(copyId, {
- abortSignal: options.abortSignal,
- leaseAccessConditions: options.conditions,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _b.sent()];
- case 3:
- e_12 = _b.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_12.message
- });
- throw e_12;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
+ PremiumPageBlobTier["P30"] = "P30";
+ /**
+ * P40 Tier.
+ */
+ PremiumPageBlobTier["P40"] = "P40";
+ /**
+ * P50 Tier.
+ */
+ PremiumPageBlobTier["P50"] = "P50";
+ /**
+ * P60 Tier.
+ */
+ PremiumPageBlobTier["P60"] = "P60";
+ /**
+ * P70 Tier.
+ */
+ PremiumPageBlobTier["P70"] = "P70";
+ /**
+ * P80 Tier.
+ */
+ PremiumPageBlobTier["P80"] = "P80";
+})(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {}));
+function toAccessTier(tier) {
+ if (tier == undefined) {
+ return undefined;
+ }
+ return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service).
+}
+function ensureCpkIfSpecified(cpk, isHttps) {
+ if (cpk && !isHttps) {
+ throw new RangeError("Customer-provided encryption key must be used over HTTPS.");
+ }
+ if (cpk && !cpk.encryptionAlgorithm) {
+ cpk.encryptionAlgorithm = EncryptionAlgorithmAES25;
+ }
+}
+
+/**
+ * Function that converts PageRange and ClearRange to a common Range object.
+ * PageRange and ClearRange have start and end while Range offset and count
+ * this function normalizes to Range.
+ * @param response Model PageBlob Range response
+ */
+function rangeResponseFromModel(response) {
+ var pageRange = (response._response.parsedBody.pageRange || []).map(function (x) { return ({
+ offset: x.start,
+ count: x.end - x.start
+ }); });
+ var clearRange = (response._response.parsedBody.clearRange || []).map(function (x) { return ({
+ offset: x.start,
+ count: x.end - x.start
+ }); });
+ return tslib.__assign(tslib.__assign({}, response), { pageRange: pageRange,
+ clearRange: clearRange, _response: tslib.__assign(tslib.__assign({}, response._response), { parsedBody: {
+ pageRange: pageRange,
+ clearRange: clearRange
+ } }) });
+}
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * This is the poller returned by {@link BlobClient.beginCopyFromURL}.
+ * This can not be instantiated directly outside of this package.
+ *
+ * @ignore
+ */
+var BlobBeginCopyFromUrlPoller = /** @class */ (function (_super) {
+ tslib.__extends(BlobBeginCopyFromUrlPoller, _super);
+ function BlobBeginCopyFromUrlPoller(options) {
+ var _this = this;
+ var blobClient = options.blobClient, copySource = options.copySource, _a = options.intervalInMs, intervalInMs = _a === void 0 ? 15000 : _a, onProgress = options.onProgress, resumeFrom = options.resumeFrom, startCopyFromURLOptions = options.startCopyFromURLOptions;
+ var state;
+ if (resumeFrom) {
+ state = JSON.parse(resumeFrom).state;
+ }
+ var operation = makeBlobBeginCopyFromURLPollOperation(tslib.__assign(tslib.__assign({}, state), { blobClient: blobClient,
+ copySource: copySource,
+ startCopyFromURLOptions: startCopyFromURLOptions }));
+ _this = _super.call(this, operation) || this;
+ if (typeof onProgress === "function") {
+ _this.onProgress(onProgress);
+ }
+ _this.intervalInMs = intervalInMs;
+ return _this;
+ }
+ BlobBeginCopyFromUrlPoller.prototype.delay = function () {
+ return coreHttp.delay(this.intervalInMs);
+ };
+ return BlobBeginCopyFromUrlPoller;
+}(coreLro.Poller));
+/**
+ * Note: Intentionally using function expression over arrow function expression
+ * so that the function can be invoked with a different context.
+ * This affects what `this` refers to.
+ * @ignore
+ */
+var cancel = function cancel(options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var state, copyId;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ state = this.state;
+ copyId = state.copyId;
+ if (state.isCompleted) {
+ return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];
+ }
+ if (!copyId) {
+ state.isCancelled = true;
+ return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];
+ }
+ // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call
+ return [4 /*yield*/, state.blobClient.abortCopyFromURL(copyId, {
+ abortSignal: options.abortSignal
+ })];
+ case 1:
+ // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call
+ _a.sent();
+ state.isCancelled = true;
+ return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];
+ }
+ });
+ });
+};
+/**
+ * Note: Intentionally using function expression over arrow function expression
+ * so that the function can be invoked with a different context.
+ * This affects what `this` refers to.
+ * @ignore
+ */
+var update = function update(options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var state, blobClient, copySource, startCopyFromURLOptions, result, result, copyStatus, copyProgress, prevCopyProgress, err_1;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ state = this.state;
+ blobClient = state.blobClient, copySource = state.copySource, startCopyFromURLOptions = state.startCopyFromURLOptions;
+ if (!!state.isStarted) return [3 /*break*/, 2];
+ state.isStarted = true;
+ return [4 /*yield*/, blobClient.startCopyFromURL(copySource, startCopyFromURLOptions)];
+ case 1:
+ result = _a.sent();
+ // copyId is needed to abort
+ state.copyId = result.copyId;
+ if (result.copyStatus === "success") {
+ state.result = result;
+ state.isCompleted = true;
+ }
+ return [3 /*break*/, 6];
+ case 2:
+ if (!!state.isCompleted) return [3 /*break*/, 6];
+ _a.label = 3;
+ case 3:
+ _a.trys.push([3, 5, , 6]);
+ return [4 /*yield*/, state.blobClient.getProperties({ abortSignal: options.abortSignal })];
+ case 4:
+ result = _a.sent();
+ copyStatus = result.copyStatus, copyProgress = result.copyProgress;
+ prevCopyProgress = state.copyProgress;
+ if (copyProgress) {
+ state.copyProgress = copyProgress;
+ }
+ if (copyStatus === "pending" &&
+ copyProgress !== prevCopyProgress &&
+ typeof options.fireProgress === "function") {
+ // trigger in setTimeout, or swallow error?
+ options.fireProgress(state);
+ }
+ else if (copyStatus === "success") {
+ state.result = result;
+ state.isCompleted = true;
+ }
+ else if (copyStatus === "failed") {
+ state.error = new Error("Blob copy failed with reason: \"" + (result.copyStatusDescription || "unknown") + "\"");
+ state.isCompleted = true;
+ }
+ return [3 /*break*/, 6];
+ case 5:
+ err_1 = _a.sent();
+ state.error = err_1;
+ state.isCompleted = true;
+ return [3 /*break*/, 6];
+ case 6: return [2 /*return*/, makeBlobBeginCopyFromURLPollOperation(state)];
+ }
});
+ });
+};
+/**
+ * Note: Intentionally using function expression over arrow function expression
+ * so that the function can be invoked with a different context.
+ * This affects what `this` refers to.
+ * @ignore
+ */
+var toString = function toString() {
+ return JSON.stringify({ state: this.state }, function (key, value) {
+ // remove blobClient from serialized state since a client can't be hydrated from this info.
+ if (key === "blobClient") {
+ return undefined;
+ }
+ return value;
+ });
+};
+/**
+ * Creates a poll operation given the provided state.
+ * @ignore
+ */
+function makeBlobBeginCopyFromURLPollOperation(state) {
+ return {
+ state: tslib.__assign({}, state),
+ cancel: cancel,
+ toString: toString,
+ update: update
};
+}
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+/**
+ * Generate a range string. For example:
+ *
+ * "bytes=255-" or "bytes=0-511"
+ *
+ * @export
+ * @param {Range} iRange
+ * @returns {string}
+ */
+function rangeToString(iRange) {
+ if (iRange.offset < 0) {
+ throw new RangeError("Range.offset cannot be smaller than 0.");
+ }
+ if (iRange.count && iRange.count <= 0) {
+ throw new RangeError("Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.");
+ }
+ return iRange.count
+ ? "bytes=" + iRange.offset + "-" + (iRange.offset + iRange.count - 1)
+ : "bytes=" + iRange.offset + "-";
+}
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient}
+ * and etc.
+ *
+ * @export
+ * @class StorageClient
+ */
+var StorageClient = /** @class */ (function () {
/**
- * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not
- * return a response until the copy is complete.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url
+ * Creates an instance of StorageClient.
+ * @param {string} url url to resource
+ * @param {Pipeline} pipeline request policy pipeline.
+ * @memberof StorageClient
+ */
+ function StorageClient(url, pipeline) {
+ // URL should be encoded and only once, protocol layer shouldn't encode URL again
+ this.url = escapeURLPath(url);
+ this.accountName = getAccountNameFromUrl(url);
+ this.pipeline = pipeline;
+ this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions());
+ this.isHttps = iEqual(getURLScheme(this.url) || "", "https");
+ this.credential = new AnonymousCredential();
+ for (var _i = 0, _a = this.pipeline.factories; _i < _a.length; _i++) {
+ var factory = _a[_i];
+ if ((coreHttp.isNode && factory instanceof StorageSharedKeyCredential) ||
+ factory instanceof AnonymousCredential ||
+ coreHttp.isTokenCredential(factory)) {
+ this.credential = factory;
+ }
+ }
+ // Override protocol layer's default content-type
+ var storageClientContext = this.storageClientContext;
+ storageClientContext.requestContentType = undefined;
+ }
+ return StorageClient;
+}());
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * States for Batch.
+ *
+ * @enum {number}
+ */
+var BatchStates;
+(function (BatchStates) {
+ BatchStates[BatchStates["Good"] = 0] = "Good";
+ BatchStates[BatchStates["Error"] = 1] = "Error";
+})(BatchStates || (BatchStates = {}));
+/**
+ * Batch provides basic parallel execution with concurrency limits.
+ * Will stop execute left operations when one of the executed operation throws an error.
+ * But Batch cannot cancel ongoing operations, you need to cancel them by yourself.
+ *
+ * @export
+ * @class Batch
+ */
+var Batch = /** @class */ (function () {
+ /**
+ * Creates an instance of Batch.
+ * @param {number} [concurrency=5]
+ * @memberof Batch
+ */
+ function Batch(concurrency) {
+ if (concurrency === void 0) { concurrency = 5; }
+ /**
+ * Number of active operations under execution.
+ *
+ * @private
+ * @type {number}
+ * @memberof Batch
+ */
+ this.actives = 0;
+ /**
+ * Number of completed operations under execution.
+ *
+ * @private
+ * @type {number}
+ * @memberof Batch
+ */
+ this.completed = 0;
+ /**
+ * Offset of next operation to be executed.
+ *
+ * @private
+ * @type {number}
+ * @memberof Batch
+ */
+ this.offset = 0;
+ /**
+ * Operation array to be executed.
+ *
+ * @private
+ * @type {Operation[]}
+ * @memberof Batch
+ */
+ this.operations = [];
+ /**
+ * States of Batch. When an error happens, state will turn into error.
+ * Batch will stop execute left operations.
+ *
+ * @private
+ * @type {BatchStates}
+ * @memberof Batch
+ */
+ this.state = BatchStates.Good;
+ if (concurrency < 1) {
+ throw new RangeError("concurrency must be larger than 0");
+ }
+ this.concurrency = concurrency;
+ this.emitter = new events.EventEmitter();
+ }
+ /**
+ * Add a operation into queue.
*
- * @param {string} copySource The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication
- * @param {BlobSyncCopyFromURLOptions} [options={}]
- * @returns {Promise}
- * @memberof BlobClient
+ * @param {Operation} operation
+ * @memberof Batch
*/
- BlobClient.prototype.syncCopyFromURL = function (copySource, options) {
- var _a;
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_13;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
+ Batch.prototype.addOperation = function (operation) {
+ var _this = this;
+ this.operations.push(function () { return tslib.__awaiter(_this, void 0, void 0, function () {
+ var error_1;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
case 0:
- _b = createSpan("BlobClient-syncCopyFromURL", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- options.conditions = options.conditions || {};
- options.sourceConditions = options.sourceConditions || {};
- _c.label = 1;
+ _a.trys.push([0, 2, , 3]);
+ this.actives++;
+ return [4 /*yield*/, operation()];
case 1:
- _c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.blobContext.copyFromURL(copySource, {
- abortSignal: options.abortSignal,
- metadata: options.metadata,
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- sourceModifiedAccessConditions: {
- sourceIfMatch: options.sourceConditions.ifMatch,
- sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,
- sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,
- sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince
- },
- sourceContentMD5: options.sourceContentMD5,
- blobTagsString: toBlobTagsString(options.tags),
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _c.sent()];
- case 3:
- e_13 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_13.message
- });
- throw e_13;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
+ _a.sent();
+ this.actives--;
+ this.completed++;
+ this.parallelExecute();
+ return [3 /*break*/, 3];
+ case 2:
+ error_1 = _a.sent();
+ this.emitter.emit("error", error_1);
+ return [3 /*break*/, 3];
+ case 3: return [2 /*return*/];
}
});
- });
+ }); });
};
/**
- * Sets the tier on a blob. The operation is allowed on a page blob in a premium
- * storage account and on a block blob in a blob storage account (locally redundant
- * storage only). A premium page blob's tier determines the allowed size, IOPS,
- * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive
- * storage type. This operation does not update the blob's ETag.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier
+ * Start execute operations in the queue.
*
- * @param {BlockBlobTier | PremiumPageBlobTier | string} tier The tier to be set on the blob. Valid values are Hot, Cool, or Archive.
- * @param {BlobSetTierOptions} [options] Optional options to the Blob Set Tier operation.
- * @returns {Promise}
- * @memberof BlobClient
+ * @returns {Promise}
+ * @memberof Batch
*/
- BlobClient.prototype.setAccessTier = function (tier, options) {
- var _a;
- if (options === void 0) { options = {}; }
+ Batch.prototype.do = function () {
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_14;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
- case 0:
- _b = createSpan("BlobClient-setAccessTier", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- _c.label = 1;
- case 1:
- _c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.blobContext.setTier(toAccessTier(tier), {
- abortSignal: options.abortSignal,
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- rehydratePriority: options.rehydratePriority,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _c.sent()];
- case 3:
- e_14 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_14.message
- });
- throw e_14;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
- };
- BlobClient.prototype.downloadToBuffer = function (param1, param2, param3, param4) {
- if (param4 === void 0) { param4 = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var buffer, offset, count, options, _a, span, spanOptions, response, transferProgress_1, batch, _loop_1, off, e_15;
- var _this = this;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
- case 0:
- offset = 0;
- count = 0;
- options = param4;
- if (param1 instanceof Buffer) {
- buffer = param1;
- offset = param2 || 0;
- count = typeof param3 === "number" ? param3 : 0;
- }
- else {
- offset = typeof param1 === "number" ? param1 : 0;
- count = typeof param2 === "number" ? param2 : 0;
- options = param3 || {};
- }
- _a = createSpan("BlobClient-downloadToBuffer", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
- case 1:
- _b.trys.push([1, 5, 6, 7]);
- if (!options.blockSize) {
- options.blockSize = 0;
- }
- if (options.blockSize < 0) {
- throw new RangeError("blockSize option must be >= 0");
- }
- if (options.blockSize === 0) {
- options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;
- }
- if (offset < 0) {
- throw new RangeError("offset option must be >= 0");
- }
- if (count && count <= 0) {
- throw new RangeError("count option must be > 0");
- }
- if (!options.conditions) {
- options.conditions = {};
- }
- if (!!count) return [3 /*break*/, 3];
- return [4 /*yield*/, this.getProperties(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 2:
- response = _b.sent();
- count = response.contentLength - offset;
- if (count < 0) {
- throw new RangeError("offset " + offset + " shouldn't be larger than blob size " + response.contentLength);
- }
- _b.label = 3;
- case 3:
- // Allocate the buffer of size = count if the buffer is not provided
- if (!buffer) {
- try {
- buffer = Buffer.alloc(count);
- }
- catch (error) {
- throw new Error("Unable to allocate the buffer of size: " + count + "(in bytes). Please try passing your own buffer to the \"downloadToBuffer\" method or try using other methods like \"download\" or \"downloadToFile\".\t " + error.message);
- }
- }
- if (buffer.length < count) {
- throw new RangeError("The buffer's size should be equal to or larger than the request count of bytes: " + count);
- }
- transferProgress_1 = 0;
- batch = new Batch(options.concurrency);
- _loop_1 = function (off) {
- batch.addOperation(function () { return tslib.__awaiter(_this, void 0, void 0, function () {
- var chunkEnd, response, stream;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- chunkEnd = offset + count;
- if (off + options.blockSize < chunkEnd) {
- chunkEnd = off + options.blockSize;
- }
- return [4 /*yield*/, this.download(off, chunkEnd - off, {
- abortSignal: options.abortSignal,
- conditions: options.conditions,
- maxRetryRequests: options.maxRetryRequestsPerBlock,
- customerProvidedKey: options.customerProvidedKey,
- tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })
- })];
- case 1:
- response = _a.sent();
- stream = response.readableStreamBody;
- return [4 /*yield*/, streamToBuffer(stream, buffer, off - offset, chunkEnd - offset)];
- case 2:
- _a.sent();
- // Update progress after block is downloaded, in case of block trying
- // Could provide finer grained progress updating inside HTTP requests,
- // only if convenience layer download try is enabled
- transferProgress_1 += chunkEnd - off;
- if (options.onProgress) {
- options.onProgress({ loadedBytes: transferProgress_1 });
- }
- return [2 /*return*/];
- }
- });
- }); });
- };
- for (off = offset; off < offset + count; off = off + options.blockSize) {
- _loop_1(off);
- }
- return [4 /*yield*/, batch.do()];
- case 4:
- _b.sent();
- return [2 /*return*/, buffer];
- case 5:
- e_15 = _b.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_15.message
- });
- throw e_15;
- case 6:
- span.end();
- return [7 /*endfinally*/];
- case 7: return [2 /*return*/];
+ var _this = this;
+ return tslib.__generator(this, function (_a) {
+ if (this.operations.length === 0) {
+ return [2 /*return*/, Promise.resolve()];
}
+ this.parallelExecute();
+ return [2 /*return*/, new Promise(function (resolve, reject) {
+ _this.emitter.on("finish", resolve);
+ _this.emitter.on("error", function (error) {
+ _this.state = BatchStates.Error;
+ reject(error);
+ });
+ })];
});
});
};
/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- *
- * Downloads an Azure Blob to a local file.
- * Fails if the the given file path already exits.
- * Offset and count are optional, pass 0 and undefined respectively to download the entire blob.
+ * Get next operation to be executed. Return null when reaching ends.
*
- * @param {string} filePath
- * @param {number} [offset] From which position of the block blob to download.
- * @param {number} [count] How much data to be downloaded. Will download to the end when passing undefined.
- * @param {BlobDownloadOptions} [options] Options to Blob download options.
- * @returns {Promise} The response data for blob download operation,
- * but with readableStreamBody set to undefined since its
- * content is already read and written into a local file
- * at the specified path.
- * @memberof BlobClient
+ * @private
+ * @returns {(Operation | null)}
+ * @memberof Batch
*/
- BlobClient.prototype.downloadToFile = function (filePath, offset, count, options) {
- if (offset === void 0) { offset = 0; }
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, response, e_16;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
- case 0:
- _a = createSpan("BlobClient-downloadToFile", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
- case 1:
- _b.trys.push([1, 5, 6, 7]);
- return [4 /*yield*/, this.download(offset, count, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 2:
- response = _b.sent();
- if (!response.readableStreamBody) return [3 /*break*/, 4];
- return [4 /*yield*/, readStreamToLocalFile(response.readableStreamBody, filePath)];
- case 3:
- _b.sent();
- _b.label = 4;
- case 4:
- // The stream is no longer accessible so setting it to undefined.
- response.blobDownloadStream = undefined;
- return [2 /*return*/, response];
- case 5:
- e_16 = _b.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_16.message
- });
- throw e_16;
- case 6:
- span.end();
- return [7 /*endfinally*/];
- case 7: return [2 /*return*/];
- }
- });
- });
- };
- BlobClient.prototype.getBlobAndContainerNamesFromUrl = function () {
- var containerName;
- var blobName;
- try {
- // URL may look like the following
- // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString";
- // "https://myaccount.blob.core.windows.net/mycontainer/blob";
- // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString";
- // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt";
- // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob`
- // http://localhost:10001/devstoreaccount1/containername/blob
- var parsedUrl = coreHttp.URLBuilder.parse(this.url);
- if (parsedUrl.getHost().split(".")[1] === "blob") {
- // "https://myaccount.blob.core.windows.net/containername/blob".
- // .getPath() -> /containername/blob
- var pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?");
- containerName = pathComponents[1];
- blobName = pathComponents[3];
- }
- else if (isIpEndpointStyle(parsedUrl)) {
- // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob
- // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob
- // .getPath() -> /devstoreaccount1/containername/blob
- var pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?");
- containerName = pathComponents[2];
- blobName = pathComponents[4];
- }
- else {
- // "https://customdomain.com/containername/blob".
- // .getPath() -> /containername/blob
- var pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?");
- containerName = pathComponents[1];
- blobName = pathComponents[3];
- }
- // decode the encoded blobName, containerName - to get all the special characters that might be present in them
- containerName = decodeURIComponent(containerName);
- blobName = decodeURIComponent(blobName);
- // Azure Storage Server will replace "\" with "/" in the blob names
- // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName
- blobName = blobName.replace(/\\/g, "/");
- if (!blobName) {
- throw new Error("Provided blobName is invalid.");
- }
- else if (!containerName) {
- throw new Error("Provided containerName is invalid.");
- }
- return { blobName: blobName, containerName: containerName };
- }
- catch (error) {
- throw new Error("Unable to extract blobName and containerName with provided information.");
+ Batch.prototype.nextOperation = function () {
+ if (this.offset < this.operations.length) {
+ return this.operations[this.offset++];
}
+ return null;
};
/**
- * Asynchronously copies a blob to a destination within the storage account.
- * In version 2012-02-12 and later, the source for a Copy Blob operation can be
- * a committed blob in any Azure storage account.
- * Beginning with version 2015-02-21, the source for a Copy Blob operation can be
- * an Azure file in any Azure storage account.
- * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob
- * operation to copy from another storage account.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob
+ * Start execute operations. One one the most important difference between
+ * this method with do() is that do() wraps as an sync method.
*
- * @param {string} copySource url to the source Azure Blob/File.
- * @param {BlobStartCopyFromURLOptions} [options] Optional options to the Blob Start Copy From URL operation.
- * @returns {Promise}
- * @memberof BlobClient
+ * @private
+ * @returns {void}
+ * @memberof Batch
*/
- BlobClient.prototype.startCopyFromURL = function (copySource, options) {
- var _a;
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_17;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
- case 0:
- _b = createSpan("BlobClient-startCopyFromURL", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- options.conditions = options.conditions || {};
- options.sourceConditions = options.sourceConditions || {};
- _c.label = 1;
- case 1:
- _c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.blobContext.startCopyFromURL(copySource, {
- abortSignal: options.abortSignal,
- leaseAccessConditions: options.conditions,
- metadata: options.metadata,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- sourceModifiedAccessConditions: {
- sourceIfMatch: options.sourceConditions.ifMatch,
- sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,
- sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,
- sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,
- sourceIfTags: options.sourceConditions.tagConditions
- },
- rehydratePriority: options.rehydratePriority,
- tier: toAccessTier(options.tier),
- blobTagsString: toBlobTagsString(options.tags),
- sealBlob: options.sealBlob,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _c.sent()];
- case 3:
- e_17 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_17.message
- });
- throw e_17;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
+ Batch.prototype.parallelExecute = function () {
+ if (this.state === BatchStates.Error) {
+ return;
+ }
+ if (this.completed >= this.operations.length) {
+ this.emitter.emit("finish");
+ return;
+ }
+ while (this.actives < this.concurrency) {
+ var operation = this.nextOperation();
+ if (operation) {
+ operation();
+ }
+ else {
+ return;
+ }
+ }
};
- return BlobClient;
-}(StorageClient));
+ return Batch;
+}());
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
/**
- * AppendBlobClient defines a set of operations applicable to append blobs.
+ * This class generates a readable stream from the data in an array of buffers.
*
* @export
- * @class AppendBlobClient
- * @extends {BlobClient}
+ * @class BuffersStream
*/
-var AppendBlobClient = /** @class */ (function (_super) {
- tslib.__extends(AppendBlobClient, _super);
- function AppendBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {
- var _this = this;
- // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.
- // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);
- var pipeline;
- var url;
- options = options || {};
- if (credentialOrPipelineOrContainerName instanceof Pipeline) {
- // (url: string, pipeline: Pipeline)
- url = urlOrConnectionString;
- pipeline = credentialOrPipelineOrContainerName;
+var BuffersStream = /** @class */ (function (_super) {
+ tslib.__extends(BuffersStream, _super);
+ /**
+ * Creates an instance of BuffersStream that will emit the data
+ * contained in the array of buffers.
+ *
+ * @param {Buffer[]} buffers Array of buffers containing the data
+ * @param {number} byteLength The total length of data contained in the buffers
+ * @memberof BuffersStream
+ */
+ function BuffersStream(buffers, byteLength, options) {
+ var _this = _super.call(this, options) || this;
+ _this.buffers = buffers;
+ _this.byteLength = byteLength;
+ _this.byteOffsetInCurrentBuffer = 0;
+ _this.bufferIndex = 0;
+ _this.pushedBytesLength = 0;
+ // check byteLength is no larger than buffers[] total length
+ var buffersLength = 0;
+ for (var _i = 0, _a = _this.buffers; _i < _a.length; _i++) {
+ var buf = _a[_i];
+ buffersLength += buf.byteLength;
}
- else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||
- credentialOrPipelineOrContainerName instanceof AnonymousCredential ||
- coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {
- // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString;
- url = urlOrConnectionString;
- options = blobNameOrOptions;
- pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
+ if (buffersLength < _this.byteLength) {
+ throw new Error("Data size shouldn't be larger than the total length of buffers.");
}
- else if (!credentialOrPipelineOrContainerName &&
- typeof credentialOrPipelineOrContainerName !== "string") {
- // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
- url = urlOrConnectionString;
- // The second parameter is undefined. Use anonymous credential.
- pipeline = newPipeline(new AnonymousCredential(), options);
+ return _this;
+ }
+ /**
+ * Internal _read() that will be called when the stream wants to pull more data in.
+ *
+ * @param {number} size Optional. The size of data to be read
+ * @memberof BuffersStream
+ */
+ BuffersStream.prototype._read = function (size) {
+ if (this.pushedBytesLength >= this.byteLength) {
+ this.push(null);
}
- else if (credentialOrPipelineOrContainerName &&
- typeof credentialOrPipelineOrContainerName === "string" &&
- blobNameOrOptions &&
- typeof blobNameOrOptions === "string") {
- // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)
- var containerName = credentialOrPipelineOrContainerName;
- var blobName = blobNameOrOptions;
- var extractedCreds = extractConnectionStringParts(urlOrConnectionString);
- if (extractedCreds.kind === "AccountConnString") {
- {
- var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
- url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));
- options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);
- pipeline = newPipeline(sharedKeyCredential, options);
- }
- }
- else if (extractedCreds.kind === "SASConnString") {
- url =
- appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +
- "?" +
- extractedCreds.accountSas;
- pipeline = newPipeline(new AnonymousCredential(), options);
+ if (!size) {
+ size = this.readableHighWaterMark;
+ }
+ var outBuffers = [];
+ var i = 0;
+ while (i < size && this.pushedBytesLength < this.byteLength) {
+ // The last buffer may be longer than the data it contains.
+ var remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength;
+ var remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer;
+ var remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers);
+ if (remaining > size - i) {
+ // chunkSize = size - i
+ var end = this.byteOffsetInCurrentBuffer + size - i;
+ outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));
+ this.pushedBytesLength += size - i;
+ this.byteOffsetInCurrentBuffer = end;
+ i = size;
+ break;
}
else {
- throw new Error("Connection string must be either an Account connection string or a SAS connection string");
+ // chunkSize = remaining
+ var end = this.byteOffsetInCurrentBuffer + remaining;
+ outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));
+ if (remaining === remainingCapacityInThisBuffer) {
+ // this.buffers[this.bufferIndex] used up, shift to next one
+ this.byteOffsetInCurrentBuffer = 0;
+ this.bufferIndex++;
+ }
+ else {
+ this.byteOffsetInCurrentBuffer = end;
+ }
+ this.pushedBytesLength += remaining;
+ i += remaining;
}
}
- else {
- throw new Error("Expecting non-empty strings for containerName and blobName parameters");
+ if (outBuffers.length > 1) {
+ this.push(Buffer.concat(outBuffers));
+ }
+ else if (outBuffers.length === 1) {
+ this.push(outBuffers[0]);
+ }
+ };
+ return BuffersStream;
+}(stream.Readable));
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * maxBufferLength is max size of each buffer in the pooled buffers.
+ */
+// Can't use import as Typescript doesn't recognize "buffer".
+var maxBufferLength = __webpack_require__(293).constants.MAX_LENGTH;
+/**
+ * This class provides a buffer container which conceptually has no hard size limit.
+ * It accepts a capacity, an array of input buffers and the total length of input data.
+ * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers
+ * into the internal "buffer" serially with respect to the total length.
+ * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream
+ * assembled from all the data in the internal "buffer".
+ *
+ * @export
+ * @class BufferScheduler
+ */
+var PooledBuffer = /** @class */ (function () {
+ function PooledBuffer(capacity, buffers, totalLength) {
+ /**
+ * Internal buffers used to keep the data.
+ * Each buffer has a length of the maxBufferLength except last one.
+ *
+ * @private
+ * @type {Buffer[]}
+ * @memberof PooledBuffer
+ */
+ this.buffers = [];
+ this.capacity = capacity;
+ this._size = 0;
+ // allocate
+ var bufferNum = Math.ceil(capacity / maxBufferLength);
+ for (var i = 0; i < bufferNum; i++) {
+ var len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;
+ if (len === 0) {
+ len = maxBufferLength;
+ }
+ this.buffers.push(Buffer.allocUnsafe(len));
+ }
+ if (buffers) {
+ this.fill(buffers, totalLength);
}
- _this = _super.call(this, url, pipeline) || this;
- _this.appendBlobContext = new AppendBlob(_this.storageClientContext);
- return _this;
}
+ Object.defineProperty(PooledBuffer.prototype, "size", {
+ /**
+ * The size of the data contained in the pooled buffers.
+ */
+ get: function () {
+ return this._size;
+ },
+ enumerable: false,
+ configurable: true
+ });
/**
- * Creates a new AppendBlobClient object identical to the source but with the
- * specified snapshot timestamp.
- * Provide "" will remove the snapshot and return a Client to the base blob.
+ * Fill the internal buffers with data in the input buffers serially
+ * with respect to the total length and the total capacity of the internal buffers.
+ * Data copied will be shift out of the input buffers.
*
- * @param {string} snapshot The snapshot timestamp.
- * @returns {AppendBlobClient} A new AppendBlobClient object identical to the source but with the specified snapshot timestamp.
- * @memberof AppendBlobClient
- */
- AppendBlobClient.prototype.withSnapshot = function (snapshot) {
- return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);
- };
- /**
- * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.
- * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ * @param {Buffer[]} buffers Input buffers containing the data to be filled in the pooled buffer
+ * @param {number} totalLength Total length of the data to be filled in.
*
- * @param {AppendBlobCreateOptions} [options] Options to the Append Block Create operation.
- * @returns {Promise}
- * @memberof AppendBlobClient
+ * @returns {void}
+ * @memberof PooledBuffer
+ */
+ PooledBuffer.prototype.fill = function (buffers, totalLength) {
+ this._size = Math.min(this.capacity, totalLength);
+ var i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0;
+ while (totalCopiedNum < this._size) {
+ var source = buffers[i];
+ var target = this.buffers[j];
+ var copiedNum = source.copy(target, targetOffset, sourceOffset);
+ totalCopiedNum += copiedNum;
+ sourceOffset += copiedNum;
+ targetOffset += copiedNum;
+ if (sourceOffset === source.length) {
+ i++;
+ sourceOffset = 0;
+ }
+ if (targetOffset === target.length) {
+ j++;
+ targetOffset = 0;
+ }
+ }
+ // clear copied from source buffers
+ buffers.splice(0, i);
+ if (buffers.length > 0) {
+ buffers[0] = buffers[0].slice(sourceOffset);
+ }
+ };
+ /**
+ * Get the readable stream assembled from all the data in the internal buffers.
*
- * Example usage:
+ * @returns {Readable}
+ * @memberof PooledBuffer
+ */
+ PooledBuffer.prototype.getReadableStream = function () {
+ return new BuffersStream(this.buffers, this.size);
+ };
+ return PooledBuffer;
+}());
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * This class accepts a Node.js Readable stream as input, and keeps reading data
+ * from the stream into the internal buffer structure, until it reaches maxBuffers.
+ * Every available buffer will try to trigger outgoingHandler.
+ *
+ * The internal buffer structure includes an incoming buffer array, and a outgoing
+ * buffer array. The incoming buffer array includes the "empty" buffers can be filled
+ * with new incoming data. The outgoing array includes the filled buffers to be
+ * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize.
+ *
+ * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING
+ *
+ * NUM_OF_ALL_BUFFERS <= maxBuffers
+ *
+ * PERFORMANCE IMPROVEMENT TIPS:
+ * 1. Input stream highWaterMark is better to set a same value with bufferSize
+ * parameter, which will avoid Buffer.concat() operations.
+ * 2. concurrency should set a smaller value than maxBuffers, which is helpful to
+ * reduce the possibility when a outgoing handler waits for the stream data.
+ * in this situation, outgoing handlers are blocked.
+ * Outgoing queue shouldn't be empty.
+ * @export
+ * @class BufferScheduler
+ */
+var BufferScheduler = /** @class */ (function () {
+ /**
+ * Creates an instance of BufferScheduler.
*
- * ```js
- * const appendBlobClient = containerClient.getAppendBlobClient("");
- * await appendBlobClient.create();
- * ```
+ * @param {Readable} readable A Node.js Readable stream
+ * @param {number} bufferSize Buffer size of every maintained buffer
+ * @param {number} maxBuffers How many buffers can be allocated
+ * @param {OutgoingHandler} outgoingHandler An async function scheduled to be
+ * triggered when a buffer fully filled
+ * with stream data
+ * @param {number} concurrency Concurrency of executing outgoingHandlers (>0)
+ * @param {string} [encoding] [Optional] Encoding of Readable stream when it's a string stream
+ * @memberof BufferScheduler
*/
- AppendBlobClient.prototype.create = function (options) {
- var _a;
- if (options === void 0) { options = {}; }
+ function BufferScheduler(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) {
+ /**
+ * An internal event emitter.
+ *
+ * @private
+ * @type {EventEmitter}
+ * @memberof BufferScheduler
+ */
+ this.emitter = new events.EventEmitter();
+ /**
+ * An internal offset marker to track data offset in bytes of next outgoingHandler.
+ *
+ * @private
+ * @type {number}
+ * @memberof BufferScheduler
+ */
+ this.offset = 0;
+ /**
+ * An internal marker to track whether stream is end.
+ *
+ * @private
+ * @type {boolean}
+ * @memberof BufferScheduler
+ */
+ this.isStreamEnd = false;
+ /**
+ * An internal marker to track whether stream or outgoingHandler returns error.
+ *
+ * @private
+ * @type {boolean}
+ * @memberof BufferScheduler
+ */
+ this.isError = false;
+ /**
+ * How many handlers are executing.
+ *
+ * @private
+ * @type {number}
+ * @memberof BufferScheduler
+ */
+ this.executingOutgoingHandlers = 0;
+ /**
+ * How many buffers have been allocated.
+ *
+ * @private
+ * @type {number}
+ * @memberof BufferScheduler
+ */
+ this.numBuffers = 0;
+ /**
+ * Because this class doesn't know how much data every time stream pops, which
+ * is defined by highWaterMarker of the stream. So BufferScheduler will cache
+ * data received from the stream, when data in unresolvedDataArray exceeds the
+ * blockSize defined, it will try to concat a blockSize of buffer, fill into available
+ * buffers from incoming and push to outgoing array.
+ *
+ * @private
+ * @type {Buffer[]}
+ * @memberof BufferScheduler
+ */
+ this.unresolvedDataArray = [];
+ /**
+ * How much data consisted in unresolvedDataArray.
+ *
+ * @private
+ * @type {number}
+ * @memberof BufferScheduler
+ */
+ this.unresolvedLength = 0;
+ /**
+ * The array includes all the available buffers can be used to fill data from stream.
+ *
+ * @private
+ * @type {PooledBuffer[]}
+ * @memberof BufferScheduler
+ */
+ this.incoming = [];
+ /**
+ * The array (queue) includes all the buffers filled from stream data.
+ *
+ * @private
+ * @type {PooledBuffer[]}
+ * @memberof BufferScheduler
+ */
+ this.outgoing = [];
+ if (bufferSize <= 0) {
+ throw new RangeError("bufferSize must be larger than 0, current is " + bufferSize);
+ }
+ if (maxBuffers <= 0) {
+ throw new RangeError("maxBuffers must be larger than 0, current is " + maxBuffers);
+ }
+ if (concurrency <= 0) {
+ throw new RangeError("concurrency must be larger than 0, current is " + concurrency);
+ }
+ this.bufferSize = bufferSize;
+ this.maxBuffers = maxBuffers;
+ this.readable = readable;
+ this.outgoingHandler = outgoingHandler;
+ this.concurrency = concurrency;
+ this.encoding = encoding;
+ }
+ /**
+ * Start the scheduler, will return error when stream of any of the outgoingHandlers
+ * returns error.
+ *
+ * @returns {Promise}
+ * @memberof BufferScheduler
+ */
+ BufferScheduler.prototype.do = function () {
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_18;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
- case 0:
- _b = createSpan("AppendBlobClient-create", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- options.conditions = options.conditions || {};
- _c.label = 1;
- case 1:
- _c.trys.push([1, 3, 4, 5]);
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.appendBlobContext.create(0, {
- abortSignal: options.abortSignal,
- blobHTTPHeaders: options.blobHTTPHeaders,
- leaseAccessConditions: options.conditions,
- metadata: options.metadata,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- cpkInfo: options.customerProvidedKey,
- encryptionScope: options.encryptionScope,
- blobTagsString: toBlobTagsString(options.tags),
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _c.sent()];
- case 3:
- e_18 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_18.message
+ var _this = this;
+ return tslib.__generator(this, function (_a) {
+ return [2 /*return*/, new Promise(function (resolve, reject) {
+ _this.readable.on("data", function (data) {
+ data = typeof data === "string" ? Buffer.from(data, _this.encoding) : data;
+ _this.appendUnresolvedData(data);
+ if (!_this.resolveData()) {
+ _this.readable.pause();
+ }
});
- throw e_18;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
+ _this.readable.on("error", function (err) {
+ _this.emitter.emit("error", err);
+ });
+ _this.readable.on("end", function () {
+ _this.isStreamEnd = true;
+ _this.emitter.emit("checkEnd");
+ });
+ _this.emitter.on("error", function (err) {
+ _this.isError = true;
+ _this.readable.pause();
+ reject(err);
+ });
+ _this.emitter.on("checkEnd", function () {
+ if (_this.outgoing.length > 0) {
+ _this.triggerOutgoingHandlers();
+ return;
+ }
+ if (_this.isStreamEnd && _this.executingOutgoingHandlers === 0) {
+ if (_this.unresolvedLength > 0 && _this.unresolvedLength < _this.bufferSize) {
+ var buffer_1 = _this.shiftBufferFromUnresolvedDataArray();
+ _this.outgoingHandler(function () { return buffer_1.getReadableStream(); }, buffer_1.size, _this.offset)
+ .then(resolve)
+ .catch(reject);
+ }
+ else if (_this.unresolvedLength >= _this.bufferSize) {
+ return;
+ }
+ else {
+ resolve();
+ }
+ }
+ });
+ })];
});
});
};
/**
- * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.
- * If the blob with the same name already exists, the content of the existing blob will remain unchanged.
- * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ * Insert a new data into unresolved array.
*
- * @param {AppendBlobCreateIfNotExistsOptions} [options]
- * @returns {Promise}
- * @memberof AppendBlobClient
+ * @private
+ * @param {Buffer} data
+ * @memberof BufferScheduler
*/
- AppendBlobClient.prototype.createIfNotExists = function (options) {
- var _a, _b;
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _c, span, spanOptions, conditions, res, e_19;
- return tslib.__generator(this, function (_d) {
- switch (_d.label) {
- case 0:
- _c = createSpan("AppendBlobClient-createIfNotExists", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;
- conditions = { ifNoneMatch: ETagAny };
- _d.label = 1;
- case 1:
- _d.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.create(tslib.__assign(tslib.__assign({}, options), { conditions: conditions, tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 2:
- res = _d.sent();
- return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable
- })];
- case 3:
- e_19 = _d.sent();
- if (((_a = e_19.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") {
- span.setStatus({
- code: api.CanonicalCode.ALREADY_EXISTS,
- message: "Expected exception when creating a blob only if it does not already exist."
- });
- return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_19.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_19.response })];
- }
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_19.message
- });
- throw e_19;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
+ BufferScheduler.prototype.appendUnresolvedData = function (data) {
+ this.unresolvedDataArray.push(data);
+ this.unresolvedLength += data.length;
+ };
+ /**
+ * Try to shift a buffer with size in blockSize. The buffer returned may be less
+ * than blockSize when data in unresolvedDataArray is less than bufferSize.
+ *
+ * @private
+ * @returns {PooledBuffer}
+ * @memberof BufferScheduler
+ */
+ BufferScheduler.prototype.shiftBufferFromUnresolvedDataArray = function (buffer) {
+ if (!buffer) {
+ buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength);
+ }
+ else {
+ buffer.fill(this.unresolvedDataArray, this.unresolvedLength);
+ }
+ this.unresolvedLength -= buffer.size;
+ return buffer;
+ };
+ /**
+ * Resolve data in unresolvedDataArray. For every buffer with size in blockSize
+ * shifted, it will try to get (or allocate a buffer) from incoming, and fill it,
+ * then push it into outgoing to be handled by outgoing handler.
+ *
+ * Return false when available buffers in incoming are not enough, else true.
+ *
+ * @private
+ * @returns {boolean} Return false when buffers in incoming are not enough, else true.
+ * @memberof BufferScheduler
+ */
+ BufferScheduler.prototype.resolveData = function () {
+ while (this.unresolvedLength >= this.bufferSize) {
+ var buffer = void 0;
+ if (this.incoming.length > 0) {
+ buffer = this.incoming.shift();
+ this.shiftBufferFromUnresolvedDataArray(buffer);
+ }
+ else {
+ if (this.numBuffers < this.maxBuffers) {
+ buffer = this.shiftBufferFromUnresolvedDataArray();
+ this.numBuffers++;
}
- });
- });
+ else {
+ // No available buffer, wait for buffer returned
+ return false;
+ }
+ }
+ this.outgoing.push(buffer);
+ this.triggerOutgoingHandlers();
+ }
+ return true;
};
/**
- * Seals the append blob, making it read only.
+ * Try to trigger a outgoing handler for every buffer in outgoing. Stop when
+ * concurrency reaches.
*
- * @param {AppendBlobSealOptions} [options={}]
- * @returns {Promise}
- * @memberof AppendBlobClient
+ * @private
+ * @memberof BufferScheduler
*/
- AppendBlobClient.prototype.seal = function (options) {
- var _a;
- if (options === void 0) { options = {}; }
+ BufferScheduler.prototype.triggerOutgoingHandlers = function () {
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_20;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
- case 0:
- _b = createSpan("AppendBlobClient-seal", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- options.conditions = options.conditions || {};
- _c.label = 1;
- case 1:
- _c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.appendBlobContext.seal({
- abortSignal: options.abortSignal,
- appendPositionAccessConditions: options.conditions,
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _c.sent()];
- case 3:
- e_20 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_20.message
- });
- throw e_20;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
+ var buffer;
+ return tslib.__generator(this, function (_a) {
+ do {
+ if (this.executingOutgoingHandlers >= this.concurrency) {
+ return [2 /*return*/];
+ }
+ buffer = this.outgoing.shift();
+ if (buffer) {
+ this.triggerOutgoingHandler(buffer);
+ }
+ } while (buffer);
+ return [2 /*return*/];
});
});
};
/**
- * Commits a new block of data to the end of the existing append blob.
- * @see https://docs.microsoft.com/rest/api/storageservices/append-block
- *
- * @param {HttpRequestBody} body Data to be appended.
- * @param {number} contentLength Length of the body in bytes.
- * @param {AppendBlobAppendBlockOptions} [options] Options to the Append Block operation.
- * @returns {Promise}
- * @memberof AppendBlobClient
- *
- * Example usage:
- *
- * ```js
- * const content = "Hello World!";
- *
- * // Create a new append blob and append data to the blob.
- * const newAppendBlobClient = containerClient.getAppendBlobClient("");
- * await newAppendBlobClient.create();
- * await newAppendBlobClient.appendBlock(content, content.length);
+ * Trigger a outgoing handler for a buffer shifted from outgoing.
*
- * // Append data to an existing append blob.
- * const existingAppendBlobClient = containerClient.getAppendBlobClient("");
- * await existingAppendBlobClient.appendBlock(content, content.length);
- * ```
+ * @private
+ * @param {Buffer} buffer
+ * @returns {Promise}
+ * @memberof BufferScheduler
*/
- AppendBlobClient.prototype.appendBlock = function (body, contentLength, options) {
- var _a;
- if (options === void 0) { options = {}; }
+ BufferScheduler.prototype.triggerOutgoingHandler = function (buffer) {
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_21;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
+ var bufferLength, err_1;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
case 0:
- _b = createSpan("AppendBlobClient-appendBlock", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- options.conditions = options.conditions || {};
- _c.label = 1;
+ bufferLength = buffer.size;
+ this.executingOutgoingHandlers++;
+ this.offset += bufferLength;
+ _a.label = 1;
case 1:
- _c.trys.push([1, 3, 4, 5]);
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.appendBlobContext.appendBlock(body, contentLength, {
- abortSignal: options.abortSignal,
- appendPositionAccessConditions: options.conditions,
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- onUploadProgress: options.onProgress,
- transactionalContentMD5: options.transactionalContentMD5,
- transactionalContentCrc64: options.transactionalContentCrc64,
- cpkInfo: options.customerProvidedKey,
- encryptionScope: options.encryptionScope,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _c.sent()];
+ _a.trys.push([1, 3, , 4]);
+ return [4 /*yield*/, this.outgoingHandler(function () { return buffer.getReadableStream(); }, bufferLength, this.offset - bufferLength)];
+ case 2:
+ _a.sent();
+ return [3 /*break*/, 4];
case 3:
- e_21 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_21.message
- });
- throw e_21;
+ err_1 = _a.sent();
+ this.emitter.emit("error", err_1);
+ return [2 /*return*/];
case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
+ this.executingOutgoingHandlers--;
+ this.reuseBuffer(buffer);
+ this.emitter.emit("checkEnd");
+ return [2 /*return*/];
}
});
});
};
/**
- * The Append Block operation commits a new block of data to the end of an existing append blob
- * where the contents are read from a source url.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url
+ * Return buffer used by outgoing handler into incoming.
*
- * @param {string} sourceURL
- * The url to the blob that will be the source of the copy. A source blob in the same storage account can
- * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
- * must either be public or must be authenticated via a shared access signature. If the source blob is
- * public, no authentication is required to perform the operation.
- * @param {number} sourceOffset Offset in source to be appended
- * @param {number} count Number of bytes to be appended as a block
- * @param {AppendBlobAppendBlockFromURLOptions} [options={}]
- * @returns {Promise}
- * @memberof AppendBlobClient
+ * @private
+ * @param {Buffer} buffer
+ * @memberof BufferScheduler
*/
- AppendBlobClient.prototype.appendBlockFromURL = function (sourceURL, sourceOffset, count, options) {
- var _a;
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_22;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
- case 0:
- _b = createSpan("AppendBlobClient-appendBlockFromURL", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- options.conditions = options.conditions || {};
- options.sourceConditions = options.sourceConditions || {};
- _c.label = 1;
- case 1:
- _c.trys.push([1, 3, 4, 5]);
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, {
- abortSignal: options.abortSignal,
- sourceRange: rangeToString({ offset: sourceOffset, count: count }),
- sourceContentMD5: options.sourceContentMD5,
- sourceContentCrc64: options.sourceContentCrc64,
- leaseAccessConditions: options.conditions,
- appendPositionAccessConditions: options.conditions,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- sourceModifiedAccessConditions: {
- sourceIfMatch: options.sourceConditions.ifMatch,
- sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,
- sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,
- sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince
- },
- cpkInfo: options.customerProvidedKey,
- encryptionScope: options.encryptionScope,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _c.sent()];
- case 3:
- e_22 = _c.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_22.message
- });
- throw e_22;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
+ BufferScheduler.prototype.reuseBuffer = function (buffer) {
+ this.incoming.push(buffer);
+ if (!this.isError && this.resolveData() && !this.isStreamEnd) {
+ this.readable.resume();
+ }
};
- return AppendBlobClient;
-}(BlobClient));
+ return BufferScheduler;
+}());
+
+// Copyright (c) Microsoft Corporation.
/**
- * BlockBlobClient defines a set of operations applicable to block blobs.
+ * Creates a span using the global tracer.
+ * @param name The name of the operation being performed.
+ * @param tracingOptions The options for the underlying http request.
+ */
+function createSpan(operationName, tracingOptions) {
+ if (tracingOptions === void 0) { tracingOptions = {}; }
+ var tracer = coreTracing.getTracer();
+ var spanOptions = tslib.__assign(tslib.__assign({}, tracingOptions.spanOptions), { kind: api.SpanKind.INTERNAL });
+ var span = tracer.startSpan("Azure.Storage.Blob." + operationName, spanOptions);
+ span.setAttribute("az.namespace", "Microsoft.Storage");
+ var newOptions = tracingOptions.spanOptions || {};
+ if (span.isRecording()) {
+ newOptions = tslib.__assign(tslib.__assign({}, tracingOptions.spanOptions), { parent: span.context(), attributes: tslib.__assign(tslib.__assign({}, spanOptions.attributes), { "az.namespace": "Microsoft.Storage" }) });
+ }
+ return {
+ span: span,
+ spanOptions: newOptions
+ };
+}
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * Reads a readable stream into buffer. Fill the buffer from offset to end.
*
* @export
- * @class BlockBlobClient
- * @extends {BlobClient}
+ * @param {NodeJS.ReadableStream} stream A Node.js Readable stream
+ * @param {Buffer} buffer Buffer to be filled, length must >= offset
+ * @param {number} offset From which position in the buffer to be filled, inclusive
+ * @param {number} end To which position in the buffer to be filled, exclusive
+ * @param {string} [encoding] Encoding of the Readable stream
+ * @returns {Promise}
*/
-var BlockBlobClient = /** @class */ (function (_super) {
- tslib.__extends(BlockBlobClient, _super);
- function BlockBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {
+function streamToBuffer(stream, buffer, offset, end, encoding) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var pos, count;
+ return tslib.__generator(this, function (_a) {
+ pos = 0;
+ count = end - offset;
+ return [2 /*return*/, new Promise(function (resolve, reject) {
+ stream.on("readable", function () {
+ if (pos >= count) {
+ resolve();
+ return;
+ }
+ var chunk = stream.read();
+ if (!chunk) {
+ return;
+ }
+ if (typeof chunk === "string") {
+ chunk = Buffer.from(chunk, encoding);
+ }
+ // How much data needed in this chunk
+ var chunkLength = pos + chunk.length > count ? count - pos : chunk.length;
+ buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength);
+ pos += chunkLength;
+ });
+ stream.on("end", function () {
+ if (pos < count) {
+ reject(new Error("Stream drains before getting enough data needed. Data read: " + pos + ", data need: " + count));
+ }
+ resolve();
+ });
+ stream.on("error", reject);
+ })];
+ });
+ });
+}
+/**
+ * Reads a readable stream into buffer entirely.
+ *
+ * @export
+ * @param {NodeJS.ReadableStream} stream A Node.js Readable stream
+ * @param {Buffer} buffer Buffer to be filled, length must >= offset
+ * @param {string} [encoding] Encoding of the Readable stream
+ * @returns {Promise} with the count of bytes read.
+ * @throws {RangeError} If buffer size is not big enough.
+ */
+function streamToBuffer2(stream, buffer, encoding) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var pos, bufferSize;
+ return tslib.__generator(this, function (_a) {
+ pos = 0;
+ bufferSize = buffer.length;
+ return [2 /*return*/, new Promise(function (resolve, reject) {
+ stream.on("readable", function () {
+ var chunk = stream.read();
+ if (!chunk) {
+ return;
+ }
+ if (typeof chunk === "string") {
+ chunk = Buffer.from(chunk, encoding);
+ }
+ if (pos + chunk.length > bufferSize) {
+ reject(new Error("Stream exceeds buffer size. Buffer size: " + bufferSize));
+ return;
+ }
+ buffer.fill(chunk, pos, pos + chunk.length);
+ pos += chunk.length;
+ });
+ stream.on("end", function () {
+ resolve(pos);
+ });
+ stream.on("error", reject);
+ })];
+ });
+ });
+}
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed.
+ *
+ * @export
+ * @param {NodeJS.ReadableStream} rs The read stream.
+ * @param {string} file Destination file path.
+ * @returns {Promise}
+ */
+function readStreamToLocalFile(rs, file) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ return tslib.__generator(this, function (_a) {
+ return [2 /*return*/, new Promise(function (resolve, reject) {
+ var ws = fs.createWriteStream(file);
+ rs.on("error", function (err) {
+ reject(err);
+ });
+ ws.on("error", function (err) {
+ reject(err);
+ });
+ ws.on("close", resolve);
+ rs.pipe(ws);
+ })];
+ });
+ });
+}
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Promisified version of fs.stat().
+ */
+var fsStat = util.promisify(fs.stat);
+var fsCreateReadStream = fs.createReadStream;
+
+/**
+ * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob,
+ * append blob, or page blob.
+ *
+ * @export
+ * @class BlobClient
+ */
+var BlobClient = /** @class */ (function (_super) {
+ tslib.__extends(BlobClient, _super);
+ function BlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {
+ var _a;
var _this = this;
- // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.
- // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);
+ options = options || {};
var pipeline;
var url;
- options = options || {};
if (credentialOrPipelineOrContainerName instanceof Pipeline) {
// (url: string, pipeline: Pipeline)
url = urlOrConnectionString;
@@ -49210,92 +49078,233 @@ var BlockBlobClient = /** @class */ (function (_super) {
throw new Error("Expecting non-empty strings for containerName and blobName parameters");
}
_this = _super.call(this, url, pipeline) || this;
- _this.blockBlobContext = new BlockBlob(_this.storageClientContext);
- _this._blobContext = new Blob$1(_this.storageClientContext);
+ (_a = _this.getBlobAndContainerNamesFromUrl(), _this._name = _a.blobName, _this._containerName = _a.containerName);
+ _this.blobContext = new Blob$1(_this.storageClientContext);
return _this;
}
+ Object.defineProperty(BlobClient.prototype, "name", {
+ /**
+ * The name of the blob.
+ */
+ get: function () {
+ return this._name;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobClient.prototype, "containerName", {
+ /**
+ * The name of the storage container the blob is associated with.
+ */
+ get: function () {
+ return this._containerName;
+ },
+ enumerable: false,
+ configurable: true
+ });
/**
- * Creates a new BlockBlobClient object identical to the source but with the
- * specified snapshot timestamp.
- * Provide "" will remove the snapshot and return a URL to the base blob.
+ * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp.
+ * Provide "" will remove the snapshot and return a Client to the base blob.
*
* @param {string} snapshot The snapshot timestamp.
- * @returns {BlockBlobClient} A new BlockBlobClient object identical to the source but with the specified snapshot timestamp.
- * @memberof BlockBlobClient
+ * @returns {BlobClient} A new BlobClient object identical to the source but with the specified snapshot timestamp
+ * @memberof BlobClient
*/
- BlockBlobClient.prototype.withSnapshot = function (snapshot) {
- return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);
+ BlobClient.prototype.withSnapshot = function (snapshot) {
+ return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);
};
/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ * Creates a new BlobClient object pointing to a version of this blob.
+ * Provide "" will remove the versionId and return a Client to the base blob.
*
- * Quick query for a JSON or CSV formatted blob.
+ * @param {string} versionId The versionId.
+ * @returns {BlobClient} A new BlobClient object pointing to the version of this blob.
+ * @memberof BlobClient
+ */
+ BlobClient.prototype.withVersion = function (versionId) {
+ return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline);
+ };
+ /**
+ * Creates a AppendBlobClient object.
+ *
+ * @returns {AppendBlobClient}
+ * @memberof BlobClient
+ */
+ BlobClient.prototype.getAppendBlobClient = function () {
+ return new AppendBlobClient(this.url, this.pipeline);
+ };
+ /**
+ * Creates a BlockBlobClient object.
+ *
+ * @returns {BlockBlobClient}
+ * @memberof BlobClient
+ */
+ BlobClient.prototype.getBlockBlobClient = function () {
+ return new BlockBlobClient(this.url, this.pipeline);
+ };
+ /**
+ * Creates a PageBlobClient object.
+ *
+ * @returns {PageBlobClient}
+ * @memberof BlobClient
+ */
+ BlobClient.prototype.getPageBlobClient = function () {
+ return new PageBlobClient(this.url, this.pipeline);
+ };
+ /**
+ * Reads or downloads a blob from the system, including its metadata and properties.
+ * You can also call Get Blob to read a snapshot.
+ *
+ * * In Node.js, data returns in a Readable stream readableStreamBody
+ * * In browsers, data returns in a promise blobBody
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob
+ *
+ * @param {number} [offset] From which position of the blob to download, >= 0
+ * @param {number} [count] How much data to be downloaded, > 0. Will download to the end when undefined
+ * @param {BlobDownloadOptions} [options] Optional options to Blob Download operation.
+ * @returns {Promise}
+ * @memberof BlobClient
*
* Example usage (Node.js):
*
* ```js
- * // Query and convert a blob to a string
- * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage");
- * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString();
- * console.log("Query blob content:", downloaded);
+ * // Download and convert a blob to a string
+ * const downloadBlockBlobResponse = await blobClient.download();
+ * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);
+ * console.log("Downloaded blob content:", downloaded.toString());
*
* async function streamToBuffer(readableStream) {
- * return new Promise((resolve, reject) => {
- * const chunks = [];
- * readableStream.on("data", (data) => {
- * chunks.push(data instanceof Buffer ? data : Buffer.from(data));
- * });
- * readableStream.on("end", () => {
- * resolve(Buffer.concat(chunks));
- * });
- * readableStream.on("error", reject);
- * });
+ * return new Promise((resolve, reject) => {
+ * const chunks = [];
+ * readableStream.on("data", (data) => {
+ * chunks.push(data instanceof Buffer ? data : Buffer.from(data));
+ * });
+ * readableStream.on("end", () => {
+ * resolve(Buffer.concat(chunks));
+ * });
+ * readableStream.on("error", reject);
+ * });
* }
* ```
*
- * @param {string} query
- * @param {BlockBlobQueryOptions} [options={}]
- * @returns {Promise}
- * @memberof BlockBlobClient
+ * Example usage (browser):
+ *
+ * ```js
+ * // Download and convert a blob to a string
+ * const downloadBlockBlobResponse = await blobClient.download();
+ * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);
+ * console.log(
+ * "Downloaded blob content",
+ * downloaded
+ * );
+ *
+ * async function blobToString(blob: Blob): Promise {
+ * const fileReader = new FileReader();
+ * return new Promise((resolve, reject) => {
+ * fileReader.onloadend = (ev: any) => {
+ * resolve(ev.target!.result);
+ * };
+ * fileReader.onerror = reject;
+ * fileReader.readAsText(blob);
+ * });
+ * }
+ * ```
*/
- BlockBlobClient.prototype.query = function (query, options) {
+ BlobClient.prototype.download = function (offset, count, options) {
var _a;
+ if (offset === void 0) { offset = 0; }
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, response, e_23;
+ var _b, span, spanOptions, res_1, wrappedRes, e_1;
+ var _this = this;
return tslib.__generator(this, function (_c) {
switch (_c.label) {
case 0:
+ options.conditions = options.conditions || {};
+ options.conditions = options.conditions || {};
ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- _b = createSpan("BlockBlobClient-query", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _b = createSpan("BlobClient-download", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
_c.label = 1;
case 1:
_c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this._blobContext.query({
+ return [4 /*yield*/, this.blobContext.download({
abortSignal: options.abortSignal,
- queryRequest: {
- expression: query,
- inputSerialization: toQuerySerialization(options.inputTextConfiguration),
- outputSerialization: toQuerySerialization(options.outputTextConfiguration)
- },
leaseAccessConditions: options.conditions,
modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress,
+ range: offset === 0 && !count ? undefined : rangeToString({ offset: offset, count: count }),
+ rangeGetContentMD5: options.rangeGetContentMD5,
+ rangeGetContentCRC64: options.rangeGetContentCrc64,
+ snapshot: options.snapshot,
+ cpkInfo: options.customerProvidedKey,
spanOptions: spanOptions
})];
case 2:
- response = _c.sent();
- return [2 /*return*/, new BlobQueryResponse(response, {
+ res_1 = _c.sent();
+ wrappedRes = tslib.__assign(tslib.__assign({}, res_1), { _response: res_1._response, objectReplicationDestinationPolicyId: res_1.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res_1.objectReplicationRules) });
+ // We support retrying when download stream unexpected ends in Node.js runtime
+ // Following code shouldn't be bundled into browser build, however some
+ // bundlers may try to bundle following code and "FileReadResponse.ts".
+ // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts"
+ // The config is in package.json "browser" field
+ if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) {
+ // TODO: Default value or make it a required parameter?
+ options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS;
+ }
+ if (res_1.contentLength === undefined) {
+ throw new RangeError("File download response doesn't contain valid content length header");
+ }
+ if (!res_1.etag) {
+ throw new RangeError("File download response doesn't contain valid etag header");
+ }
+ return [2 /*return*/, new BlobDownloadResponse(wrappedRes, function (start) { return tslib.__awaiter(_this, void 0, void 0, function () {
+ var updatedOptions;
+ var _a;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ updatedOptions = {
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: {
+ ifMatch: options.conditions.ifMatch || res_1.etag,
+ ifModifiedSince: options.conditions.ifModifiedSince,
+ ifNoneMatch: options.conditions.ifNoneMatch,
+ ifUnmodifiedSince: options.conditions.ifUnmodifiedSince,
+ ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions
+ },
+ range: rangeToString({
+ count: offset + res_1.contentLength - start,
+ offset: start
+ }),
+ rangeGetContentMD5: options.rangeGetContentMD5,
+ rangeGetContentCRC64: options.rangeGetContentCrc64,
+ snapshot: options.snapshot,
+ cpkInfo: options.customerProvidedKey
+ };
+ return [4 /*yield*/, this.blobContext.download(tslib.__assign({ abortSignal: options.abortSignal }, updatedOptions))];
+ case 1:
+ // Debug purpose only
+ // console.log(
+ // `Read from internal stream, range: ${
+ // updatedOptions.range
+ // }, options: ${JSON.stringify(updatedOptions)}`
+ // );
+ return [2 /*return*/, (_b.sent()).readableStreamBody];
+ }
+ });
+ }); }, offset, res_1.contentLength, {
abortSignal: options.abortSignal,
- onProgress: options.onProgress,
- onError: options.onError
+ maxRetryRequests: options.maxRetryRequests,
+ onProgress: options.onProgress
})];
case 3:
- e_23 = _c.sent();
+ e_1 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_23.message
+ message: e_1.message
});
- throw e_23;
+ throw e_1;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -49305,68 +49314,51 @@ var BlockBlobClient = /** @class */ (function (_super) {
});
};
/**
- * Creates a new block blob, or updates the content of an existing block blob.
- * Updating an existing block blob overwrites any existing metadata on the blob.
- * Partial updates are not supported; the content of the existing blob is
- * overwritten with the new content. To perform a partial update of a block blob's,
- * use {@link stageBlock} and {@link commitBlockList}.
- *
- * This is a non-parallel uploading method, please use {@link uploadFile},
- * {@link uploadStream} or {@link uploadBrowserData} for better performance
- * with concurrency uploading.
- *
- * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
- *
- * @param {HttpRequestBody} body Blob, string, ArrayBuffer, ArrayBufferView or a function
- * which returns a new Readable stream whose offset is from data source beginning.
- * @param {number} contentLength Length of body in bytes. Use Buffer.byteLength() to calculate body length for a
- * string including non non-Base64/Hex-encoded characters.
- * @param {BlockBlobUploadOptions} [options] Options to the Block Blob Upload operation.
- * @returns {Promise} Response data for the Block Blob Upload operation.
- * @memberof BlockBlobClient
+ * Returns true if the Azure blob resource represented by this client exists; false otherwise.
*
- * Example usage:
+ * NOTE: use this function with care since an existing blob might be deleted by other clients or
+ * applications. Vice versa new blobs might be added by other clients or applications after this
+ * function completes.
*
- * ```js
- * const content = "Hello world!";
- * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);
- * ```
+ * @param {BlobExistsOptions} [options] options to Exists operation.
+ * @returns {Promise}
+ * @memberof BlobClient
*/
- BlockBlobClient.prototype.upload = function (body, contentLength, options) {
- var _a;
+ BlobClient.prototype.exists = function (options) {
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_24;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
+ var _a, span, spanOptions, e_2;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
case 0:
- options.conditions = options.conditions || {};
- _b = createSpan("BlockBlobClient-upload", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- _c.label = 1;
+ _a = createSpan("BlobClient-exists", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
case 1:
- _c.trys.push([1, 3, 4, 5]);
+ _b.trys.push([1, 3, 4, 5]);
ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.blockBlobContext.upload(body, contentLength, {
+ return [4 /*yield*/, this.getProperties({
abortSignal: options.abortSignal,
- blobHTTPHeaders: options.blobHTTPHeaders,
- leaseAccessConditions: options.conditions,
- metadata: options.metadata,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- onUploadProgress: options.onProgress,
- cpkInfo: options.customerProvidedKey,
- encryptionScope: options.encryptionScope,
- tier: toAccessTier(options.tier),
- blobTagsString: toBlobTagsString(options.tags),
- spanOptions: spanOptions
+ customerProvidedKey: options.customerProvidedKey,
+ conditions: options.conditions,
+ tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })
})];
- case 2: return [2 /*return*/, _c.sent()];
+ case 2:
+ _b.sent();
+ return [2 /*return*/, true];
case 3:
- e_24 = _c.sent();
+ e_2 = _b.sent();
+ if (e_2.statusCode === 404) {
+ span.setStatus({
+ code: api.CanonicalCode.NOT_FOUND,
+ message: "Expected exception when checking blob existence"
+ });
+ return [2 /*return*/, false];
+ }
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_24.message
+ message: e_2.message
});
- throw e_24;
+ throw e_2;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -49376,47 +49368,50 @@ var BlockBlobClient = /** @class */ (function (_super) {
});
};
/**
- * Uploads the specified block to the block blob's "staging area" to be later
- * committed by a call to commitBlockList.
- * @see https://docs.microsoft.com/rest/api/storageservices/put-block
+ * Returns all user-defined metadata, standard HTTP properties, and system properties
+ * for the blob. It does not return the content of the blob.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties
*
- * @param {string} blockId A 64-byte value that is base64-encoded
- * @param {HttpRequestBody} body Data to upload to the staging area.
- * @param {number} contentLength Number of bytes to upload.
- * @param {BlockBlobStageBlockOptions} [options] Options to the Block Blob Stage Block operation.
- * @returns {Promise} Response data for the Block Blob Stage Block operation.
- * @memberof BlockBlobClient
+ * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if
+ * they originally contained uppercase characters. This differs from the metadata keys returned by
+ * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which
+ * will retain their original casing.
+ *
+ * @param {BlobGetPropertiesOptions} [options] Optional options to Get Properties operation.
+ * @returns {Promise}
+ * @memberof BlobClient
*/
- BlockBlobClient.prototype.stageBlock = function (blockId, body, contentLength, options) {
+ BlobClient.prototype.getProperties = function (options) {
+ var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_25;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
+ var _b, span, spanOptions, res, e_3;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- _a = createSpan("BlockBlobClient-stageBlock", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
+ _b = createSpan("BlobClient-getProperties", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
case 1:
- _b.trys.push([1, 3, 4, 5]);
+ _c.trys.push([1, 3, 4, 5]);
+ options.conditions = options.conditions || {};
ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.blockBlobContext.stageBlock(blockId, contentLength, body, {
+ return [4 /*yield*/, this.blobContext.getProperties({
abortSignal: options.abortSignal,
leaseAccessConditions: options.conditions,
- onUploadProgress: options.onProgress,
- transactionalContentMD5: options.transactionalContentMD5,
- transactionalContentCrc64: options.transactionalContentCrc64,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
cpkInfo: options.customerProvidedKey,
- encryptionScope: options.encryptionScope,
spanOptions: spanOptions
})];
- case 2: return [2 /*return*/, _b.sent()];
+ case 2:
+ res = _c.sent();
+ return [2 /*return*/, tslib.__assign(tslib.__assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) })];
case 3:
- e_25 = _b.sent();
+ e_3 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_25.message
+ message: e_3.message
});
- throw e_25;
+ throw e_3;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -49426,58 +49421,135 @@ var BlockBlobClient = /** @class */ (function (_super) {
});
};
/**
- * The Stage Block From URL operation creates a new block to be committed as part
- * of a blob where the contents are read from a URL.
- * This API is available starting in version 2018-03-28.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url
+ * Marks the specified blob or snapshot for deletion. The blob is later deleted
+ * during garbage collection. Note that in order to delete a blob, you must delete
+ * all of its snapshots. You can delete both at the same time with the Delete
+ * Blob operation.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
*
- * @param {string} blockId A 64-byte value that is base64-encoded
- * @param {string} sourceURL Specifies the URL of the blob. The value
- * may be a URL of up to 2 KB in length that specifies a blob.
- * The value should be URL-encoded as it would appear
- * in a request URI. The source blob must either be public
- * or must be authenticated via a shared access signature.
- * If the source blob is public, no authentication is required
- * to perform the operation. Here are some examples of source object URLs:
- * - https://myaccount.blob.core.windows.net/mycontainer/myblob
- * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=
- * @param {number} [offset] From which position of the blob to download, >= 0
- * @param {number} [count] How much data to be downloaded, > 0. Will download to the end when undefined
- * @param {BlockBlobStageBlockFromURLOptions} [options={}] Options to the Block Blob Stage Block From URL operation.
- * @returns {Promise} Response data for the Block Blob Stage Block From URL operation.
- * @memberof BlockBlobClient
+ * @param {BlobDeleteOptions} [options] Optional options to Blob Delete operation.
+ * @returns {Promise}
+ * @memberof BlobClient
*/
- BlockBlobClient.prototype.stageBlockFromURL = function (blockId, sourceURL, offset, count, options) {
- if (offset === void 0) { offset = 0; }
+ BlobClient.prototype.delete = function (options) {
+ var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_26;
+ var _b, span, spanOptions, e_4;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
+ case 0:
+ _b = createSpan("BlobClient-delete", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ options.conditions = options.conditions || {};
+ _c.label = 1;
+ case 1:
+ _c.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.blobContext.deleteMethod({
+ abortSignal: options.abortSignal,
+ deleteSnapshots: options.deleteSnapshots,
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _c.sent()];
+ case 3:
+ e_4 = _c.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_4.message
+ });
+ throw e_4;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted
+ * during garbage collection. Note that in order to delete a blob, you must delete
+ * all of its snapshots. You can delete both at the same time with the Delete
+ * Blob operation.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+ *
+ * @param {BlobDeleteOptions} [options] Optional options to Blob Delete operation.
+ * @returns {Promise}
+ * @memberof BlobClient
+ */
+ BlobClient.prototype.deleteIfExists = function (options) {
+ var _a, _b;
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _c, span, spanOptions, res, e_5;
+ return tslib.__generator(this, function (_d) {
+ switch (_d.label) {
+ case 0:
+ _c = createSpan("BlobClient-deleteIfExists", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;
+ _d.label = 1;
+ case 1:
+ _d.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 2:
+ res = _d.sent();
+ return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable
+ })];
+ case 3:
+ e_5 = _d.sent();
+ if (((_a = e_5.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") {
+ span.setStatus({
+ code: api.CanonicalCode.NOT_FOUND,
+ message: "Expected exception when deleting a blob or snapshot only if it exists."
+ });
+ return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_5.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_5.response })];
+ }
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_5.message
+ });
+ throw e_5;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * Restores the contents and metadata of soft deleted blob and any associated
+ * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29
+ * or later.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob
+ *
+ * @param {BlobUndeleteOptions} [options] Optional options to Blob Undelete operation.
+ * @returns {Promise}
+ * @memberof BlobClient
+ */
+ BlobClient.prototype.undelete = function (options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _a, span, spanOptions, e_6;
return tslib.__generator(this, function (_b) {
switch (_b.label) {
case 0:
- _a = createSpan("BlockBlobClient-stageBlockFromURL", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _a = createSpan("BlobClient-undelete", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
_b.label = 1;
case 1:
_b.trys.push([1, 3, 4, 5]);
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, {
+ return [4 /*yield*/, this.blobContext.undelete({
abortSignal: options.abortSignal,
- leaseAccessConditions: options.conditions,
- sourceContentMD5: options.sourceContentMD5,
- sourceContentCrc64: options.sourceContentCrc64,
- sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset: offset, count: count }),
- cpkInfo: options.customerProvidedKey,
- encryptionScope: options.encryptionScope,
spanOptions: spanOptions
})];
case 2: return [2 /*return*/, _b.sent()];
case 3:
- e_26 = _b.sent();
+ e_6 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_26.message
+ message: e_6.message
});
- throw e_26;
+ throw e_6;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -49487,52 +49559,49 @@ var BlockBlobClient = /** @class */ (function (_super) {
});
};
/**
- * Writes a blob by specifying the list of block IDs that make up the blob.
- * In order to be written as part of a blob, a block must have been successfully written
- * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to
- * update a blob by uploading only those blocks that have changed, then committing the new and existing
- * blocks together. Any blocks not specified in the block list and permanently deleted.
- * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list
+ * Sets system properties on the blob.
*
- * @param {string[]} blocks Array of 64-byte value that is base64-encoded
- * @param {BlockBlobCommitBlockListOptions} [options] Options to the Block Blob Commit Block List operation.
- * @returns {Promise} Response data for the Block Blob Commit Block List operation.
- * @memberof BlockBlobClient
+ * If no value provided, or no value provided for the specified blob HTTP headers,
+ * these blob HTTP headers without a value will be cleared.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties
+ *
+ * @param {BlobHTTPHeaders} [blobHTTPHeaders] If no value provided, or no value provided for
+ * the specified blob HTTP headers, these blob HTTP
+ * headers without a value will be cleared.
+ * @param {BlobSetHTTPHeadersOptions} [options] Optional options to Blob Set HTTP Headers operation.
+ * @returns {Promise}
+ * @memberof BlobClient
*/
- BlockBlobClient.prototype.commitBlockList = function (blocks, options) {
+ BlobClient.prototype.setHTTPHeaders = function (blobHTTPHeaders, options) {
var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_27;
+ var _b, span, spanOptions, e_7;
return tslib.__generator(this, function (_c) {
switch (_c.label) {
case 0:
+ _b = createSpan("BlobClient-setHTTPHeaders", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
options.conditions = options.conditions || {};
- _b = createSpan("BlockBlobClient-commitBlockList", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
_c.label = 1;
case 1:
_c.trys.push([1, 3, 4, 5]);
ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.blockBlobContext.commitBlockList({ latest: blocks }, {
+ return [4 /*yield*/, this.blobContext.setHTTPHeaders({
abortSignal: options.abortSignal,
- blobHTTPHeaders: options.blobHTTPHeaders,
+ blobHTTPHeaders: blobHTTPHeaders,
leaseAccessConditions: options.conditions,
- metadata: options.metadata,
modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
cpkInfo: options.customerProvidedKey,
- encryptionScope: options.encryptionScope,
- tier: toAccessTier(options.tier),
- blobTagsString: toBlobTagsString(options.tags),
spanOptions: spanOptions
})];
case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_27 = _c.sent();
+ e_7 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_27.message
+ message: e_7.message
});
- throw e_27;
+ throw e_7;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -49542,50 +49611,49 @@ var BlockBlobClient = /** @class */ (function (_super) {
});
};
/**
- * Returns the list of blocks that have been uploaded as part of a block blob
- * using the specified block list filter.
- * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list
+ * Sets user-defined metadata for the specified blob as one or more name-value pairs.
*
- * @param {BlockListType} listType Specifies whether to return the list of committed blocks,
- * the list of uncommitted blocks, or both lists together.
- * @param {BlockBlobGetBlockListOptions} [options] Options to the Block Blob Get Block List operation.
- * @returns {Promise} Response data for the Block Blob Get Block List operation.
- * @memberof BlockBlobClient
+ * If no option provided, or no metadata defined in the parameter, the blob
+ * metadata will be removed.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata
+ *
+ * @param {Metadata} [metadata] Replace existing metadata with this value.
+ * If no value provided the existing metadata will be removed.
+ * @param {BlobSetMetadataOptions} [options] Optional options to Set Metadata operation.
+ * @returns {Promise}
+ * @memberof BlobClient
*/
- BlockBlobClient.prototype.getBlockList = function (listType, options) {
+ BlobClient.prototype.setMetadata = function (metadata, options) {
var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, res, e_28;
+ var _b, span, spanOptions, e_8;
return tslib.__generator(this, function (_c) {
switch (_c.label) {
case 0:
- _b = createSpan("BlockBlobClient-getBlockList", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _b = createSpan("BlobClient-setMetadata", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ options.conditions = options.conditions || {};
_c.label = 1;
case 1:
_c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.blockBlobContext.getBlockList(listType, {
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return [4 /*yield*/, this.blobContext.setMetadata({
abortSignal: options.abortSignal,
leaseAccessConditions: options.conditions,
+ metadata: metadata,
modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ cpkInfo: options.customerProvidedKey,
+ encryptionScope: options.encryptionScope,
spanOptions: spanOptions
})];
- case 2:
- res = _c.sent();
- if (!res.committedBlocks) {
- res.committedBlocks = [];
- }
- if (!res.uncommittedBlocks) {
- res.uncommittedBlocks = [];
- }
- return [2 /*return*/, res];
+ case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_28 = _c.sent();
+ e_8 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_28.message
+ message: e_8.message
});
- throw e_28;
+ throw e_8;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -49594,45 +49662,43 @@ var BlockBlobClient = /** @class */ (function (_super) {
});
});
};
- // High level functions
/**
- * ONLY AVAILABLE IN BROWSERS.
- *
- * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob.
- *
- * When buffer length <= 256MB, this method will use 1 upload call to finish the upload.
- * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call
- * {@link commitBlockList} to commit the block list.
+ * Sets tags on the underlying blob.
+ * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.
+ * Valid tag key and value characters include lower and upper case letters, digits (0-9),
+ * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_').
*
- * @export
- * @param {Blob | ArrayBuffer | ArrayBufferView} browserData Blob, File, ArrayBuffer or ArrayBufferView
- * @param {BlockBlobParallelUploadOptions} [options] Options to upload browser data.
- * @returns {Promise} Response data for the Blob Upload operation.
- * @memberof BlockBlobClient
+ * @param {Tags} tags
+ * @param {BlobSetTagsOptions} [options={}]
+ * @returns {Promise}
+ * @memberof BlobClient
*/
- BlockBlobClient.prototype.uploadBrowserData = function (browserData, options) {
+ BlobClient.prototype.setTags = function (tags, options) {
+ var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, browserBlob_1, e_29;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
+ var _b, span, spanOptions, e_9;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- _a = createSpan("BlockBlobClient-uploadBrowserData", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
+ _b = createSpan("BlobClient-setTags", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
case 1:
- _b.trys.push([1, 3, 4, 5]);
- browserBlob_1 = new Blob([browserData]);
- return [4 /*yield*/, this.uploadSeekableBlob(function (offset, size) {
- return browserBlob_1.slice(offset, offset + size);
- }, browserBlob_1.size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 2: return [2 /*return*/, _b.sent()];
+ _c.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.blobContext.setTags({
+ abortSignal: options.abortSignal,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ spanOptions: spanOptions,
+ tags: toBlobTags(tags)
+ })];
+ case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_29 = _b.sent();
+ e_9 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_29.message
+ message: e_9.message
});
- throw e_29;
+ throw e_9;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -49642,526 +49708,317 @@ var BlockBlobClient = /** @class */ (function (_super) {
});
};
/**
- * ONLY AVAILABLE IN BROWSERS.
- *
- * Uploads a browser {@link Blob} object to block blob. Requires a blobFactory as the data source,
- * which need to return a {@link Blob} object with the offset and size provided.
- *
- * When buffer length <= 256MB, this method will use 1 upload call to finish the upload.
- * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList
- * to commit the block list.
+ * Gets the tags associated with the underlying blob.
*
- * @param {(offset: number, size: number) => Blob} blobFactory
- * @param {number} size size of the data to upload.
- * @param {BlockBlobParallelUploadOptions} [options] Options to Upload to Block Blob operation.
- * @returns {Promise} Response data for the Blob Upload operation.
- * @memberof BlockBlobClient
+ * @param {BlobGetTagsOptions} [options={}]
+ * @returns {Promise}
+ * @memberof BlobClient
*/
- BlockBlobClient.prototype.uploadSeekableBlob = function (blobFactory, size, options) {
+ BlobClient.prototype.getTags = function (options) {
+ var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, numBlocks_1, blockList_1, blockIDPrefix_1, transferProgress_2, batch, _loop_2, i, e_30;
- var _this = this;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
+ var _b, span, spanOptions, response, wrappedResponse, e_10;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- if (!options.blockSize) {
- options.blockSize = 0;
- }
- if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {
- throw new RangeError("blockSize option must be >= 0 and <= " + BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES);
- }
- if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) {
- options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;
- }
- if (options.maxSingleShotSize < 0 ||
- options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) {
- throw new RangeError("maxSingleShotSize option must be >= 0 and <= " + BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES);
- }
- if (options.blockSize === 0) {
- if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) {
- throw new RangeError(size + " is too larger to upload to a block blob.");
- }
- if (size > options.maxSingleShotSize) {
- options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS);
- if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) {
- options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;
- }
- }
- }
- if (!options.blobHTTPHeaders) {
- options.blobHTTPHeaders = {};
- }
- if (!options.conditions) {
- options.conditions = {};
- }
- _a = createSpan("BlockBlobClient-UploadSeekableBlob", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
+ _b = createSpan("BlobClient-getTags", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
case 1:
- _b.trys.push([1, 5, 6, 7]);
- if (!(size <= options.maxSingleShotSize)) return [3 /*break*/, 3];
- return [4 /*yield*/, this.upload(blobFactory(0, size), size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 2: return [2 /*return*/, _b.sent()];
+ _c.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.blobContext.getTags({
+ abortSignal: options.abortSignal,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ spanOptions: spanOptions
+ })];
+ case 2:
+ response = _c.sent();
+ wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} });
+ return [2 /*return*/, wrappedResponse];
case 3:
- numBlocks_1 = Math.floor((size - 1) / options.blockSize) + 1;
- if (numBlocks_1 > BLOCK_BLOB_MAX_BLOCKS) {
- throw new RangeError("The buffer's size is too big or the BlockSize is too small;" +
- ("the number of blocks must be <= " + BLOCK_BLOB_MAX_BLOCKS));
- }
- blockList_1 = [];
- blockIDPrefix_1 = coreHttp.generateUuid();
- transferProgress_2 = 0;
- batch = new Batch(options.concurrency);
- _loop_2 = function (i) {
- batch.addOperation(function () { return tslib.__awaiter(_this, void 0, void 0, function () {
- var blockID, start, end, contentLength;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- blockID = generateBlockID(blockIDPrefix_1, i);
- start = options.blockSize * i;
- end = i === numBlocks_1 - 1 ? size : start + options.blockSize;
- contentLength = end - start;
- blockList_1.push(blockID);
- return [4 /*yield*/, this.stageBlock(blockID, blobFactory(start, contentLength), contentLength, {
- abortSignal: options.abortSignal,
- conditions: options.conditions,
- encryptionScope: options.encryptionScope,
- tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })
- })];
- case 1:
- _a.sent();
- // Update progress after block is successfully uploaded to server, in case of block trying
- // TODO: Hook with convenience layer progress event in finer level
- transferProgress_2 += contentLength;
- if (options.onProgress) {
- options.onProgress({
- loadedBytes: transferProgress_2
- });
- }
- return [2 /*return*/];
- }
- });
- }); });
- };
- for (i = 0; i < numBlocks_1; i++) {
- _loop_2(i);
- }
- return [4 /*yield*/, batch.do()];
- case 4:
- _b.sent();
- return [2 /*return*/, this.commitBlockList(blockList_1, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 5:
- e_30 = _b.sent();
+ e_10 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_30.message
+ message: e_10.message
});
- throw e_30;
- case 6:
+ throw e_10;
+ case 4:
span.end();
return [7 /*endfinally*/];
- case 7: return [2 /*return*/];
+ case 5: return [2 /*return*/];
}
});
});
};
/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ * Get a {@link BlobLeaseClient} that manages leases on the blob.
*
- * Uploads a local file in blocks to a block blob.
+ * @param {string} [proposeLeaseId] Initial proposed lease Id.
+ * @returns {BlobLeaseClient} A new BlobLeaseClient object for managing leases on the blob.
+ * @memberof BlobClient
+ */
+ BlobClient.prototype.getBlobLeaseClient = function (proposeLeaseId) {
+ return new BlobLeaseClient(this, proposeLeaseId);
+ };
+ /**
+ * Creates a read-only snapshot of a blob.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob
*
- * When file size <= 256MB, this method will use 1 upload call to finish the upload.
- * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList
- * to commit the block list.
- *
- * @param {string} filePath Full path of local file
- * @param {BlockBlobParallelUploadOptions} [options] Options to Upload to Block Blob operation.
- * @returns {(Promise)} Response data for the Blob Upload operation.
- * @memberof BlockBlobClient
+ * @param {BlobCreateSnapshotOptions} [options] Optional options to the Blob Create Snapshot operation.
+ * @returns {Promise}
+ * @memberof BlobClient
*/
- BlockBlobClient.prototype.uploadFile = function (filePath, options) {
+ BlobClient.prototype.createSnapshot = function (options) {
+ var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, size, e_31;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
+ var _b, span, spanOptions, e_11;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- _a = createSpan("BlockBlobClient-uploadFile", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
+ _b = createSpan("BlobClient-createSnapshot", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ options.conditions = options.conditions || {};
+ _c.label = 1;
case 1:
- _b.trys.push([1, 4, 5, 6]);
- return [4 /*yield*/, fsStat(filePath)];
- case 2:
- size = (_b.sent()).size;
- return [4 /*yield*/, this.uploadResetableStream(function (offset, count) {
- return fsCreateReadStream(filePath, {
- autoClose: true,
- end: count ? offset + count - 1 : Infinity,
- start: offset
- });
- }, size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 3: return [2 /*return*/, _b.sent()];
- case 4:
- e_31 = _b.sent();
+ _c.trys.push([1, 3, 4, 5]);
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return [4 /*yield*/, this.blobContext.createSnapshot({
+ abortSignal: options.abortSignal,
+ leaseAccessConditions: options.conditions,
+ metadata: options.metadata,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ cpkInfo: options.customerProvidedKey,
+ encryptionScope: options.encryptionScope,
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _c.sent()];
+ case 3:
+ e_11 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_31.message
+ message: e_11.message
});
- throw e_31;
- case 5:
+ throw e_11;
+ case 4:
span.end();
return [7 /*endfinally*/];
- case 6: return [2 /*return*/];
+ case 5: return [2 /*return*/];
}
});
});
};
/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ * Asynchronously copies a blob to a destination within the storage account.
+ * This method returns a long running operation poller that allows you to wait
+ * indefinitely until the copy is completed.
+ * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller.
+ * Note that the onProgress callback will not be invoked if the operation completes in the first
+ * request, and attempting to cancel a completed copy will result in an error being thrown.
*
- * Uploads a Node.js Readable stream into block blob.
+ * In version 2012-02-12 and later, the source for a Copy Blob operation can be
+ * a committed blob in any Azure storage account.
+ * Beginning with version 2015-02-21, the source for a Copy Blob operation can be
+ * an Azure file in any Azure storage account.
+ * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob
+ * operation to copy from another storage account.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob
*
- * PERFORMANCE IMPROVEMENT TIPS:
- * * Input stream highWaterMark is better to set a same value with bufferSize
- * parameter, which will avoid Buffer.concat() operations.
+ * Example using automatic polling:
*
- * @param {Readable} stream Node.js Readable stream
- * @param {number} bufferSize Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB
- * @param {number} maxConcurrency Max concurrency indicates the max number of buffers that can be allocated,
- * positive correlation with max uploading concurrency. Default value is 5
- * @param {BlockBlobUploadStreamOptions} [options] Options to Upload Stream to Block Blob operation.
- * @returns {Promise} Response data for the Blob Upload operation.
- * @memberof BlockBlobClient
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url');
+ * const result = await copyPoller.pollUntilDone();
+ * ```
+ *
+ * Example using manual polling:
+ *
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url');
+ * while (!poller.isDone()) {
+ * await poller.poll();
+ * }
+ * const result = copyPoller.getResult();
+ * ```
+ *
+ * Example using progress updates:
+ *
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url', {
+ * onProgress(state) {
+ * console.log(`Progress: ${state.copyProgress}`);
+ * }
+ * });
+ * const result = await copyPoller.pollUntilDone();
+ * ```
+ *
+ * Example using a changing polling interval (default 15 seconds):
+ *
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url', {
+ * intervalInMs: 1000 // poll blob every 1 second for copy progress
+ * });
+ * const result = await copyPoller.pollUntilDone();
+ * ```
+ *
+ * Example using copy cancellation:
+ *
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url');
+ * // cancel operation after starting it.
+ * try {
+ * await copyPoller.cancelOperation();
+ * // calls to get the result now throw PollerCancelledError
+ * await copyPoller.getResult();
+ * } catch (err) {
+ * if (err.name === 'PollerCancelledError') {
+ * console.log('The copy was cancelled.');
+ * }
+ * }
+ * ```
+ *
+ * @param {string} copySource url to the source Azure Blob/File.
+ * @param {BlobBeginCopyFromURLOptions} [options] Optional options to the Blob Start Copy From URL operation.
*/
- BlockBlobClient.prototype.uploadStream = function (stream, bufferSize, maxConcurrency, options) {
- if (bufferSize === void 0) { bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES; }
- if (maxConcurrency === void 0) { maxConcurrency = 5; }
+ BlobClient.prototype.beginCopyFromURL = function (copySource, options) {
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, blockNum_1, blockIDPrefix_2, transferProgress_3, blockList_2, scheduler, e_32;
+ var client, poller;
var _this = this;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
case 0:
- if (!options.blobHTTPHeaders) {
- options.blobHTTPHeaders = {};
- }
- if (!options.conditions) {
- options.conditions = {};
- }
- _a = createSpan("BlockBlobClient-uploadStream", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
- case 1:
- _b.trys.push([1, 4, 5, 6]);
- blockNum_1 = 0;
- blockIDPrefix_2 = coreHttp.generateUuid();
- transferProgress_3 = 0;
- blockList_2 = [];
- scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, function (body, length) { return tslib.__awaiter(_this, void 0, void 0, function () {
- var blockID;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- blockID = generateBlockID(blockIDPrefix_2, blockNum_1);
- blockList_2.push(blockID);
- blockNum_1++;
- return [4 /*yield*/, this.stageBlock(blockID, body, length, {
- conditions: options.conditions,
- encryptionScope: options.encryptionScope,
- tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })
- })];
- case 1:
- _a.sent();
- // Update progress after block is successfully uploaded to server, in case of block trying
- transferProgress_3 += length;
- if (options.onProgress) {
- options.onProgress({ loadedBytes: transferProgress_3 });
- }
- return [2 /*return*/];
+ client = {
+ abortCopyFromURL: function () {
+ var args = [];
+ for (var _i = 0; _i < arguments.length; _i++) {
+ args[_i] = arguments[_i];
}
- });
- }); },
- // concurrency should set a smaller value than maxConcurrency, which is helpful to
- // reduce the possibility when a outgoing handler waits for stream data, in
- // this situation, outgoing handlers are blocked.
- // Outgoing queue shouldn't be empty.
- Math.ceil((maxConcurrency / 4) * 3));
- return [4 /*yield*/, scheduler.do()];
- case 2:
- _b.sent();
- return [4 /*yield*/, this.commitBlockList(blockList_2, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 3: return [2 /*return*/, _b.sent()];
- case 4:
- e_32 = _b.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_32.message
+ return _this.abortCopyFromURL.apply(_this, args);
+ },
+ getProperties: function () {
+ var args = [];
+ for (var _i = 0; _i < arguments.length; _i++) {
+ args[_i] = arguments[_i];
+ }
+ return _this.getProperties.apply(_this, args);
+ },
+ startCopyFromURL: function () {
+ var args = [];
+ for (var _i = 0; _i < arguments.length; _i++) {
+ args[_i] = arguments[_i];
+ }
+ return _this.startCopyFromURL.apply(_this, args);
+ }
+ };
+ poller = new BlobBeginCopyFromUrlPoller({
+ blobClient: client,
+ copySource: copySource,
+ intervalInMs: options.intervalInMs,
+ onProgress: options.onProgress,
+ resumeFrom: options.resumeFrom,
+ startCopyFromURLOptions: options
});
- throw e_32;
- case 5:
- span.end();
- return [7 /*endfinally*/];
- case 6: return [2 /*return*/];
+ // Trigger the startCopyFromURL call by calling poll.
+ // Any errors from this method should be surfaced to the user.
+ return [4 /*yield*/, poller.poll()];
+ case 1:
+ // Trigger the startCopyFromURL call by calling poll.
+ // Any errors from this method should be surfaced to the user.
+ _a.sent();
+ return [2 /*return*/, poller];
}
});
});
};
/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- *
- * Accepts a Node.js Readable stream factory, and uploads in blocks to a block blob.
- * The Readable stream factory must returns a Node.js Readable stream starting from the offset defined. The offset
- * is the offset in the block blob to be uploaded.
- *
- * When buffer length <= 256MB, this method will use 1 upload call to finish the upload.
- * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}
- * to commit the block list.
+ * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero
+ * length and full metadata. Version 2012-02-12 and newer.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob
*
- * @export
- * @param {(offset: number) => NodeJS.ReadableStream} streamFactory Returns a Node.js Readable stream starting
- * from the offset defined
- * @param {number} size Size of the block blob
- * @param {BlockBlobParallelUploadOptions} [options] Options to Upload to Block Blob operation.
- * @returns {(Promise)} Response data for the Blob Upload operation.
- * @memberof BlockBlobClient
+ * @param {string} copyId Id of the Copy From URL operation.
+ * @param {BlobAbortCopyFromURLOptions} [options] Optional options to the Blob Abort Copy From URL operation.
+ * @returns {Promise}
+ * @memberof BlobClient
*/
- BlockBlobClient.prototype.uploadResetableStream = function (streamFactory, size, options) {
+ BlobClient.prototype.abortCopyFromURL = function (copyId, options) {
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, numBlocks_2, blockList_3, blockIDPrefix_3, transferProgress_4, batch, _loop_3, i, e_33;
- var _this = this;
+ var _a, span, spanOptions, e_12;
return tslib.__generator(this, function (_b) {
switch (_b.label) {
case 0:
- if (!options.blockSize) {
- options.blockSize = 0;
- }
- if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {
- throw new RangeError("blockSize option must be >= 0 and <= " + BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES);
- }
- if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) {
- options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;
- }
- if (options.maxSingleShotSize < 0 ||
- options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) {
- throw new RangeError("maxSingleShotSize option must be >= 0 and <= " + BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES);
- }
- if (options.blockSize === 0) {
- if (size > BLOCK_BLOB_MAX_BLOCKS * BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {
- throw new RangeError(size + " is too larger to upload to a block blob.");
- }
- if (size > options.maxSingleShotSize) {
- options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS);
- if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) {
- options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;
- }
- }
- }
- if (!options.blobHTTPHeaders) {
- options.blobHTTPHeaders = {};
- }
- if (!options.conditions) {
- options.conditions = {};
- }
- _a = createSpan("BlockBlobClient-uploadResetableStream", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _a = createSpan("BlobClient-abortCopyFromURL", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
_b.label = 1;
case 1:
- _b.trys.push([1, 6, 7, 8]);
- if (!(size <= options.maxSingleShotSize)) return [3 /*break*/, 3];
- return [4 /*yield*/, this.upload(function () { return streamFactory(0); }, size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ _b.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.blobContext.abortCopyFromURL(copyId, {
+ abortSignal: options.abortSignal,
+ leaseAccessConditions: options.conditions,
+ spanOptions: spanOptions
+ })];
case 2: return [2 /*return*/, _b.sent()];
case 3:
- numBlocks_2 = Math.floor((size - 1) / options.blockSize) + 1;
- if (numBlocks_2 > BLOCK_BLOB_MAX_BLOCKS) {
- throw new RangeError("The buffer's size is too big or the BlockSize is too small;" +
- ("the number of blocks must be <= " + BLOCK_BLOB_MAX_BLOCKS));
- }
- blockList_3 = [];
- blockIDPrefix_3 = coreHttp.generateUuid();
- transferProgress_4 = 0;
- batch = new Batch(options.concurrency);
- _loop_3 = function (i) {
- batch.addOperation(function () { return tslib.__awaiter(_this, void 0, void 0, function () {
- var blockID, start, end, contentLength;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- blockID = generateBlockID(blockIDPrefix_3, i);
- start = options.blockSize * i;
- end = i === numBlocks_2 - 1 ? size : start + options.blockSize;
- contentLength = end - start;
- blockList_3.push(blockID);
- return [4 /*yield*/, this.stageBlock(blockID, function () { return streamFactory(start, contentLength); }, contentLength, {
- abortSignal: options.abortSignal,
- conditions: options.conditions,
- encryptionScope: options.encryptionScope,
- tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })
- })];
- case 1:
- _a.sent();
- // Update progress after block is successfully uploaded to server, in case of block trying
- transferProgress_4 += contentLength;
- if (options.onProgress) {
- options.onProgress({ loadedBytes: transferProgress_4 });
- }
- return [2 /*return*/];
- }
- });
- }); });
- };
- for (i = 0; i < numBlocks_2; i++) {
- _loop_3(i);
- }
- return [4 /*yield*/, batch.do()];
- case 4:
- _b.sent();
- return [4 /*yield*/, this.commitBlockList(blockList_3, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 5: return [2 /*return*/, _b.sent()];
- case 6:
- e_33 = _b.sent();
+ e_12 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_33.message
+ message: e_12.message
});
- throw e_33;
- case 7:
+ throw e_12;
+ case 4:
span.end();
return [7 /*endfinally*/];
- case 8: return [2 /*return*/];
+ case 5: return [2 /*return*/];
}
});
});
};
- return BlockBlobClient;
-}(BlobClient));
-/**
- * PageBlobClient defines a set of operations applicable to page blobs.
- *
- * @export
- * @class PageBlobClient
- * @extends {BlobClient}
- */
-var PageBlobClient = /** @class */ (function (_super) {
- tslib.__extends(PageBlobClient, _super);
- function PageBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {
- var _this = this;
- // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.
- // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);
- var pipeline;
- var url;
- options = options || {};
- if (credentialOrPipelineOrContainerName instanceof Pipeline) {
- // (url: string, pipeline: Pipeline)
- url = urlOrConnectionString;
- pipeline = credentialOrPipelineOrContainerName;
- }
- else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||
- credentialOrPipelineOrContainerName instanceof AnonymousCredential ||
- coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {
- // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
- url = urlOrConnectionString;
- options = blobNameOrOptions;
- pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
- }
- else if (!credentialOrPipelineOrContainerName &&
- typeof credentialOrPipelineOrContainerName !== "string") {
- // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
- // The second parameter is undefined. Use anonymous credential.
- url = urlOrConnectionString;
- pipeline = newPipeline(new AnonymousCredential(), options);
- }
- else if (credentialOrPipelineOrContainerName &&
- typeof credentialOrPipelineOrContainerName === "string" &&
- blobNameOrOptions &&
- typeof blobNameOrOptions === "string") {
- // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)
- var containerName = credentialOrPipelineOrContainerName;
- var blobName = blobNameOrOptions;
- var extractedCreds = extractConnectionStringParts(urlOrConnectionString);
- if (extractedCreds.kind === "AccountConnString") {
- {
- var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
- url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));
- options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);
- pipeline = newPipeline(sharedKeyCredential, options);
- }
- }
- else if (extractedCreds.kind === "SASConnString") {
- url =
- appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +
- "?" +
- extractedCreds.accountSas;
- pipeline = newPipeline(new AnonymousCredential(), options);
- }
- else {
- throw new Error("Connection string must be either an Account connection string or a SAS connection string");
- }
- }
- else {
- throw new Error("Expecting non-empty strings for containerName and blobName parameters");
- }
- _this = _super.call(this, url, pipeline) || this;
- _this.pageBlobContext = new PageBlob(_this.storageClientContext);
- return _this;
- }
- /**
- * Creates a new PageBlobClient object identical to the source but with the
- * specified snapshot timestamp.
- * Provide "" will remove the snapshot and return a Client to the base blob.
- *
- * @param {string} snapshot The snapshot timestamp.
- * @returns {PageBlobClient} A new PageBlobClient object identical to the source but with the specified snapshot timestamp.
- * @memberof PageBlobClient
- */
- PageBlobClient.prototype.withSnapshot = function (snapshot) {
- return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);
- };
/**
- * Creates a page blob of the specified length. Call uploadPages to upload data
- * data to a page blob.
- * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not
+ * return a response until the copy is complete.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url
*
- * @param {number} size size of the page blob.
- * @param {PageBlobCreateOptions} [options] Options to the Page Blob Create operation.
- * @returns {Promise} Response data for the Page Blob Create operation.
- * @memberof PageBlobClient
+ * @param {string} copySource The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication
+ * @param {BlobSyncCopyFromURLOptions} [options={}]
+ * @returns {Promise}
+ * @memberof BlobClient
*/
- PageBlobClient.prototype.create = function (size, options) {
+ BlobClient.prototype.syncCopyFromURL = function (copySource, options) {
var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_34;
+ var _b, span, spanOptions, e_13;
return tslib.__generator(this, function (_c) {
switch (_c.label) {
case 0:
+ _b = createSpan("BlobClient-syncCopyFromURL", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
options.conditions = options.conditions || {};
- _b = createSpan("PageBlobClient-create", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ options.sourceConditions = options.sourceConditions || {};
_c.label = 1;
case 1:
_c.trys.push([1, 3, 4, 5]);
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.pageBlobContext.create(0, size, {
+ return [4 /*yield*/, this.blobContext.copyFromURL(copySource, {
abortSignal: options.abortSignal,
- blobHTTPHeaders: options.blobHTTPHeaders,
- blobSequenceNumber: options.blobSequenceNumber,
- leaseAccessConditions: options.conditions,
metadata: options.metadata,
+ leaseAccessConditions: options.conditions,
modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- cpkInfo: options.customerProvidedKey,
- encryptionScope: options.encryptionScope,
- tier: toAccessTier(options.tier),
+ sourceModifiedAccessConditions: {
+ sourceIfMatch: options.sourceConditions.ifMatch,
+ sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,
+ sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,
+ sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince
+ },
+ sourceContentMD5: options.sourceContentMD5,
blobTagsString: toBlobTagsString(options.tags),
spanOptions: spanOptions
})];
case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_34 = _c.sent();
+ e_13 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_34.message
+ message: e_13.message
});
- throw e_34;
+ throw e_13;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50171,48 +50028,45 @@ var PageBlobClient = /** @class */ (function (_super) {
});
};
/**
- * Creates a page blob of the specified length. Call uploadPages to upload data
- * data to a page blob. If the blob with the same name already exists, the content
- * of the existing blob will remain unchanged.
- * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ * Sets the tier on a blob. The operation is allowed on a page blob in a premium
+ * storage account and on a block blob in a blob storage account (locally redundant
+ * storage only). A premium page blob's tier determines the allowed size, IOPS,
+ * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive
+ * storage type. This operation does not update the blob's ETag.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier
*
- * @param {number} size size of the page blob.
- * @param {PageBlobCreateIfNotExistsOptions} [options]
- * @returns {Promise}
- * @memberof PageBlobClient
- */
- PageBlobClient.prototype.createIfNotExists = function (size, options) {
- var _a, _b;
+ * @param {BlockBlobTier | PremiumPageBlobTier | string} tier The tier to be set on the blob. Valid values are Hot, Cool, or Archive.
+ * @param {BlobSetTierOptions} [options] Optional options to the Blob Set Tier operation.
+ * @returns {Promise}
+ * @memberof BlobClient
+ */
+ BlobClient.prototype.setAccessTier = function (tier, options) {
+ var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _c, span, spanOptions, conditions, res, e_35;
- return tslib.__generator(this, function (_d) {
- switch (_d.label) {
+ var _b, span, spanOptions, e_14;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- _c = createSpan("PageBlobClient-createIfNotExists", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;
- _d.label = 1;
+ _b = createSpan("BlobClient-setAccessTier", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
case 1:
- _d.trys.push([1, 3, 4, 5]);
- conditions = { ifNoneMatch: ETagAny };
- return [4 /*yield*/, this.create(size, tslib.__assign(tslib.__assign({}, options), { conditions: conditions, tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 2:
- res = _d.sent();
- return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable
- })];
+ _c.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.blobContext.setTier(toAccessTier(tier), {
+ abortSignal: options.abortSignal,
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ rehydratePriority: options.rehydratePriority,
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_35 = _d.sent();
- if (((_a = e_35.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") {
- span.setStatus({
- code: api.CanonicalCode.ALREADY_EXISTS,
- message: "Expected exception when creating a blob only if it does not already exist."
- });
- return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_35.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_35.response })];
- }
+ e_14 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_35.message
+ message: e_14.message
});
- throw e_35;
+ throw e_14;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50221,113 +50075,293 @@ var PageBlobClient = /** @class */ (function (_super) {
});
});
};
+ BlobClient.prototype.downloadToBuffer = function (param1, param2, param3, param4) {
+ if (param4 === void 0) { param4 = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var buffer, offset, count, options, _a, span, spanOptions, response, transferProgress_1, batch, _loop_1, off, e_15;
+ var _this = this;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ offset = 0;
+ count = 0;
+ options = param4;
+ if (param1 instanceof Buffer) {
+ buffer = param1;
+ offset = param2 || 0;
+ count = typeof param3 === "number" ? param3 : 0;
+ }
+ else {
+ offset = typeof param1 === "number" ? param1 : 0;
+ count = typeof param2 === "number" ? param2 : 0;
+ options = param3 || {};
+ }
+ _a = createSpan("BlobClient-downloadToBuffer", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 5, 6, 7]);
+ if (!options.blockSize) {
+ options.blockSize = 0;
+ }
+ if (options.blockSize < 0) {
+ throw new RangeError("blockSize option must be >= 0");
+ }
+ if (options.blockSize === 0) {
+ options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;
+ }
+ if (offset < 0) {
+ throw new RangeError("offset option must be >= 0");
+ }
+ if (count && count <= 0) {
+ throw new RangeError("count option must be > 0");
+ }
+ if (!options.conditions) {
+ options.conditions = {};
+ }
+ if (!!count) return [3 /*break*/, 3];
+ return [4 /*yield*/, this.getProperties(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 2:
+ response = _b.sent();
+ count = response.contentLength - offset;
+ if (count < 0) {
+ throw new RangeError("offset " + offset + " shouldn't be larger than blob size " + response.contentLength);
+ }
+ _b.label = 3;
+ case 3:
+ // Allocate the buffer of size = count if the buffer is not provided
+ if (!buffer) {
+ try {
+ buffer = Buffer.alloc(count);
+ }
+ catch (error) {
+ throw new Error("Unable to allocate the buffer of size: " + count + "(in bytes). Please try passing your own buffer to the \"downloadToBuffer\" method or try using other methods like \"download\" or \"downloadToFile\".\t " + error.message);
+ }
+ }
+ if (buffer.length < count) {
+ throw new RangeError("The buffer's size should be equal to or larger than the request count of bytes: " + count);
+ }
+ transferProgress_1 = 0;
+ batch = new Batch(options.concurrency);
+ _loop_1 = function (off) {
+ batch.addOperation(function () { return tslib.__awaiter(_this, void 0, void 0, function () {
+ var chunkEnd, response, stream;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ chunkEnd = offset + count;
+ if (off + options.blockSize < chunkEnd) {
+ chunkEnd = off + options.blockSize;
+ }
+ return [4 /*yield*/, this.download(off, chunkEnd - off, {
+ abortSignal: options.abortSignal,
+ conditions: options.conditions,
+ maxRetryRequests: options.maxRetryRequestsPerBlock,
+ customerProvidedKey: options.customerProvidedKey,
+ tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })
+ })];
+ case 1:
+ response = _a.sent();
+ stream = response.readableStreamBody;
+ return [4 /*yield*/, streamToBuffer(stream, buffer, off - offset, chunkEnd - offset)];
+ case 2:
+ _a.sent();
+ // Update progress after block is downloaded, in case of block trying
+ // Could provide finer grained progress updating inside HTTP requests,
+ // only if convenience layer download try is enabled
+ transferProgress_1 += chunkEnd - off;
+ if (options.onProgress) {
+ options.onProgress({ loadedBytes: transferProgress_1 });
+ }
+ return [2 /*return*/];
+ }
+ });
+ }); });
+ };
+ for (off = offset; off < offset + count; off = off + options.blockSize) {
+ _loop_1(off);
+ }
+ return [4 /*yield*/, batch.do()];
+ case 4:
+ _b.sent();
+ return [2 /*return*/, buffer];
+ case 5:
+ e_15 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_15.message
+ });
+ throw e_15;
+ case 6:
+ span.end();
+ return [7 /*endfinally*/];
+ case 7: return [2 /*return*/];
+ }
+ });
+ });
+ };
/**
- * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512.
- * @see https://docs.microsoft.com/rest/api/storageservices/put-page
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
*
- * @param {HttpRequestBody} body Data to upload
- * @param {number} offset Offset of destination page blob
- * @param {number} count Content length of the body, also number of bytes to be uploaded
- * @param {PageBlobUploadPagesOptions} [options] Options to the Page Blob Upload Pages operation.
- * @returns {Promise} Response data for the Page Blob Upload Pages operation.
- * @memberof PageBlobClient
+ * Downloads an Azure Blob to a local file.
+ * Fails if the the given file path already exits.
+ * Offset and count are optional, pass 0 and undefined respectively to download the entire blob.
+ *
+ * @param {string} filePath
+ * @param {number} [offset] From which position of the block blob to download.
+ * @param {number} [count] How much data to be downloaded. Will download to the end when passing undefined.
+ * @param {BlobDownloadOptions} [options] Options to Blob download options.
+ * @returns {Promise} The response data for blob download operation,
+ * but with readableStreamBody set to undefined since its
+ * content is already read and written into a local file
+ * at the specified path.
+ * @memberof BlobClient
*/
- PageBlobClient.prototype.uploadPages = function (body, offset, count, options) {
- var _a;
+ BlobClient.prototype.downloadToFile = function (filePath, offset, count, options) {
+ if (offset === void 0) { offset = 0; }
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_36;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
+ var _a, span, spanOptions, response, e_16;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
case 0:
- options.conditions = options.conditions || {};
- _b = createSpan("PageBlobClient-uploadPages", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- _c.label = 1;
+ _a = createSpan("BlobClient-downloadToFile", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
case 1:
- _c.trys.push([1, 3, 4, 5]);
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.pageBlobContext.uploadPages(body, count, {
- abortSignal: options.abortSignal,
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- onUploadProgress: options.onProgress,
- range: rangeToString({ offset: offset, count: count }),
- sequenceNumberAccessConditions: options.conditions,
- transactionalContentMD5: options.transactionalContentMD5,
- transactionalContentCrc64: options.transactionalContentCrc64,
- cpkInfo: options.customerProvidedKey,
- encryptionScope: options.encryptionScope,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _c.sent()];
+ _b.trys.push([1, 5, 6, 7]);
+ return [4 /*yield*/, this.download(offset, count, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 2:
+ response = _b.sent();
+ if (!response.readableStreamBody) return [3 /*break*/, 4];
+ return [4 /*yield*/, readStreamToLocalFile(response.readableStreamBody, filePath)];
case 3:
- e_36 = _c.sent();
+ _b.sent();
+ _b.label = 4;
+ case 4:
+ // The stream is no longer accessible so setting it to undefined.
+ response.blobDownloadStream = undefined;
+ return [2 /*return*/, response];
+ case 5:
+ e_16 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_36.message
+ message: e_16.message
});
- throw e_36;
- case 4:
+ throw e_16;
+ case 6:
span.end();
return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
+ case 7: return [2 /*return*/];
}
});
});
};
+ BlobClient.prototype.getBlobAndContainerNamesFromUrl = function () {
+ var containerName;
+ var blobName;
+ try {
+ // URL may look like the following
+ // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString";
+ // "https://myaccount.blob.core.windows.net/mycontainer/blob";
+ // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString";
+ // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt";
+ // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob`
+ // http://localhost:10001/devstoreaccount1/containername/blob
+ var parsedUrl = coreHttp.URLBuilder.parse(this.url);
+ if (parsedUrl.getHost().split(".")[1] === "blob") {
+ // "https://myaccount.blob.core.windows.net/containername/blob".
+ // .getPath() -> /containername/blob
+ var pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?");
+ containerName = pathComponents[1];
+ blobName = pathComponents[3];
+ }
+ else if (isIpEndpointStyle(parsedUrl)) {
+ // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob
+ // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob
+ // .getPath() -> /devstoreaccount1/containername/blob
+ var pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?");
+ containerName = pathComponents[2];
+ blobName = pathComponents[4];
+ }
+ else {
+ // "https://customdomain.com/containername/blob".
+ // .getPath() -> /containername/blob
+ var pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?");
+ containerName = pathComponents[1];
+ blobName = pathComponents[3];
+ }
+ // decode the encoded blobName, containerName - to get all the special characters that might be present in them
+ containerName = decodeURIComponent(containerName);
+ blobName = decodeURIComponent(blobName);
+ // Azure Storage Server will replace "\" with "/" in the blob names
+ // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName
+ blobName = blobName.replace(/\\/g, "/");
+ if (!blobName) {
+ throw new Error("Provided blobName is invalid.");
+ }
+ else if (!containerName) {
+ throw new Error("Provided containerName is invalid.");
+ }
+ return { blobName: blobName, containerName: containerName };
+ }
+ catch (error) {
+ throw new Error("Unable to extract blobName and containerName with provided information.");
+ }
+ };
/**
- * The Upload Pages operation writes a range of pages to a page blob where the
- * contents are read from a URL.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url
+ * Asynchronously copies a blob to a destination within the storage account.
+ * In version 2012-02-12 and later, the source for a Copy Blob operation can be
+ * a committed blob in any Azure storage account.
+ * Beginning with version 2015-02-21, the source for a Copy Blob operation can be
+ * an Azure file in any Azure storage account.
+ * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob
+ * operation to copy from another storage account.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob
*
- * @param {string} sourceURL Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication
- * @param {number} sourceOffset The source offset to copy from. Pass 0 to copy from the beginning of source page blob
- * @param {number} destOffset Offset of destination page blob
- * @param {number} count Number of bytes to be uploaded from source page blob
- * @param {PageBlobUploadPagesFromURLOptions} [options={}]
- * @returns {Promise}
- * @memberof PageBlobClient
+ * @param {string} copySource url to the source Azure Blob/File.
+ * @param {BlobStartCopyFromURLOptions} [options] Optional options to the Blob Start Copy From URL operation.
+ * @returns {Promise}
+ * @memberof BlobClient
*/
- PageBlobClient.prototype.uploadPagesFromURL = function (sourceURL, sourceOffset, destOffset, count, options) {
+ BlobClient.prototype.startCopyFromURL = function (copySource, options) {
var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_37;
+ var _b, span, spanOptions, e_17;
return tslib.__generator(this, function (_c) {
switch (_c.label) {
case 0:
+ _b = createSpan("BlobClient-startCopyFromURL", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
options.conditions = options.conditions || {};
options.sourceConditions = options.sourceConditions || {};
- _b = createSpan("PageBlobClient-uploadPagesFromURL", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
_c.label = 1;
case 1:
_c.trys.push([1, 3, 4, 5]);
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- return [4 /*yield*/, this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count: count }), 0, rangeToString({ offset: destOffset, count: count }), {
+ return [4 /*yield*/, this.blobContext.startCopyFromURL(copySource, {
abortSignal: options.abortSignal,
- sourceContentMD5: options.sourceContentMD5,
- sourceContentCrc64: options.sourceContentCrc64,
leaseAccessConditions: options.conditions,
- sequenceNumberAccessConditions: options.conditions,
+ metadata: options.metadata,
modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
sourceModifiedAccessConditions: {
sourceIfMatch: options.sourceConditions.ifMatch,
sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,
sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,
- sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince
+ sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,
+ sourceIfTags: options.sourceConditions.tagConditions
},
- cpkInfo: options.customerProvidedKey,
- encryptionScope: options.encryptionScope,
+ rehydratePriority: options.rehydratePriority,
+ tier: toAccessTier(options.tier),
+ blobTagsString: toBlobTagsString(options.tags),
+ sealBlob: options.sealBlob,
spanOptions: spanOptions
})];
case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_37 = _c.sent();
+ e_17 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_37.message
+ message: e_17.message
});
- throw e_37;
+ throw e_17;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50336,48 +50370,138 @@ var PageBlobClient = /** @class */ (function (_super) {
});
});
};
+ return BlobClient;
+}(StorageClient));
+/**
+ * AppendBlobClient defines a set of operations applicable to append blobs.
+ *
+ * @export
+ * @class AppendBlobClient
+ * @extends {BlobClient}
+ */
+var AppendBlobClient = /** @class */ (function (_super) {
+ tslib.__extends(AppendBlobClient, _super);
+ function AppendBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {
+ var _this = this;
+ // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.
+ // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);
+ var pipeline;
+ var url;
+ options = options || {};
+ if (credentialOrPipelineOrContainerName instanceof Pipeline) {
+ // (url: string, pipeline: Pipeline)
+ url = urlOrConnectionString;
+ pipeline = credentialOrPipelineOrContainerName;
+ }
+ else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||
+ credentialOrPipelineOrContainerName instanceof AnonymousCredential ||
+ coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {
+ // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString;
+ url = urlOrConnectionString;
+ options = blobNameOrOptions;
+ pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
+ }
+ else if (!credentialOrPipelineOrContainerName &&
+ typeof credentialOrPipelineOrContainerName !== "string") {
+ // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+ url = urlOrConnectionString;
+ // The second parameter is undefined. Use anonymous credential.
+ pipeline = newPipeline(new AnonymousCredential(), options);
+ }
+ else if (credentialOrPipelineOrContainerName &&
+ typeof credentialOrPipelineOrContainerName === "string" &&
+ blobNameOrOptions &&
+ typeof blobNameOrOptions === "string") {
+ // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)
+ var containerName = credentialOrPipelineOrContainerName;
+ var blobName = blobNameOrOptions;
+ var extractedCreds = extractConnectionStringParts(urlOrConnectionString);
+ if (extractedCreds.kind === "AccountConnString") {
+ {
+ var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
+ url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));
+ options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);
+ pipeline = newPipeline(sharedKeyCredential, options);
+ }
+ }
+ else if (extractedCreds.kind === "SASConnString") {
+ url =
+ appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +
+ "?" +
+ extractedCreds.accountSas;
+ pipeline = newPipeline(new AnonymousCredential(), options);
+ }
+ else {
+ throw new Error("Connection string must be either an Account connection string or a SAS connection string");
+ }
+ }
+ else {
+ throw new Error("Expecting non-empty strings for containerName and blobName parameters");
+ }
+ _this = _super.call(this, url, pipeline) || this;
+ _this.appendBlobContext = new AppendBlob(_this.storageClientContext);
+ return _this;
+ }
+ /**
+ * Creates a new AppendBlobClient object identical to the source but with the
+ * specified snapshot timestamp.
+ * Provide "" will remove the snapshot and return a Client to the base blob.
+ *
+ * @param {string} snapshot The snapshot timestamp.
+ * @returns {AppendBlobClient} A new AppendBlobClient object identical to the source but with the specified snapshot timestamp.
+ * @memberof AppendBlobClient
+ */
+ AppendBlobClient.prototype.withSnapshot = function (snapshot) {
+ return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);
+ };
/**
- * Frees the specified pages from the page blob.
- * @see https://docs.microsoft.com/rest/api/storageservices/put-page
+ * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
*
- * @param {number} [offset] Starting byte position of the pages to clear.
- * @param {number} [count] Number of bytes to clear.
- * @param {PageBlobClearPagesOptions} [options] Options to the Page Blob Clear Pages operation.
- * @returns {Promise} Response data for the Page Blob Clear Pages operation.
- * @memberof PageBlobClient
+ * @param {AppendBlobCreateOptions} [options] Options to the Append Block Create operation.
+ * @returns {Promise}
+ * @memberof AppendBlobClient
+ *
+ * Example usage:
+ *
+ * ```js
+ * const appendBlobClient = containerClient.getAppendBlobClient("");
+ * await appendBlobClient.create();
+ * ```
*/
- PageBlobClient.prototype.clearPages = function (offset, count, options) {
+ AppendBlobClient.prototype.create = function (options) {
var _a;
- if (offset === void 0) { offset = 0; }
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_38;
+ var _b, span, spanOptions, e_18;
return tslib.__generator(this, function (_c) {
switch (_c.label) {
case 0:
+ _b = createSpan("AppendBlobClient-create", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
options.conditions = options.conditions || {};
- _b = createSpan("PageBlobClient-clearPages", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
_c.label = 1;
case 1:
_c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.pageBlobContext.clearPages(0, {
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return [4 /*yield*/, this.appendBlobContext.create(0, {
abortSignal: options.abortSignal,
+ blobHTTPHeaders: options.blobHTTPHeaders,
leaseAccessConditions: options.conditions,
+ metadata: options.metadata,
modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- range: rangeToString({ offset: offset, count: count }),
- sequenceNumberAccessConditions: options.conditions,
cpkInfo: options.customerProvidedKey,
encryptionScope: options.encryptionScope,
+ blobTagsString: toBlobTagsString(options.tags),
spanOptions: spanOptions
})];
case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_38 = _c.sent();
+ e_18 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_38.message
+ message: e_18.message
});
- throw e_38;
+ throw e_18;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50387,46 +50511,46 @@ var PageBlobClient = /** @class */ (function (_super) {
});
};
/**
- * Returns the list of valid page ranges for a page blob or snapshot of a page blob.
- * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.
+ * If the blob with the same name already exists, the content of the existing blob will remain unchanged.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
*
- * @param {number} [offset] Starting byte position of the page ranges.
- * @param {number} [count] Number of bytes to get.
- * @param {PageBlobGetPageRangesOptions} [options] Options to the Page Blob Get Ranges operation.
- * @returns {Promise} Response data for the Page Blob Get Ranges operation.
- * @memberof PageBlobClient
+ * @param {AppendBlobCreateIfNotExistsOptions} [options]
+ * @returns {Promise}
+ * @memberof AppendBlobClient
*/
- PageBlobClient.prototype.getPageRanges = function (offset, count, options) {
- var _a;
- if (offset === void 0) { offset = 0; }
+ AppendBlobClient.prototype.createIfNotExists = function (options) {
+ var _a, _b;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_39;
- return tslib.__generator(this, function (_c) {
- switch (_c.label) {
+ var _c, span, spanOptions, conditions, res, e_19;
+ return tslib.__generator(this, function (_d) {
+ switch (_d.label) {
case 0:
- options.conditions = options.conditions || {};
- _b = createSpan("PageBlobClient-getPageRanges", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
- _c.label = 1;
+ _c = createSpan("AppendBlobClient-createIfNotExists", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;
+ conditions = { ifNoneMatch: ETagAny };
+ _d.label = 1;
case 1:
- _c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.pageBlobContext
- .getPageRanges({
- abortSignal: options.abortSignal,
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- range: rangeToString({ offset: offset, count: count }),
- spanOptions: spanOptions
- })
- .then(rangeResponseFromModel)];
- case 2: return [2 /*return*/, _c.sent()];
+ _d.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.create(tslib.__assign(tslib.__assign({}, options), { conditions: conditions, tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 2:
+ res = _d.sent();
+ return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable
+ })];
case 3:
- e_39 = _c.sent();
+ e_19 = _d.sent();
+ if (((_a = e_19.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") {
+ span.setStatus({
+ code: api.CanonicalCode.ALREADY_EXISTS,
+ message: "Expected exception when creating a blob only if it does not already exist."
+ });
+ return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_19.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_19.response })];
+ }
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_39.message
+ message: e_19.message
});
- throw e_39;
+ throw e_19;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50436,47 +50560,40 @@ var PageBlobClient = /** @class */ (function (_super) {
});
};
/**
- * Gets the collection of page ranges that differ between a specified snapshot and this page blob.
- * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ * Seals the append blob, making it read only.
*
- * @param {number} offset Starting byte position of the page blob
- * @param {number} count Number of bytes to get ranges diff.
- * @param {string} prevSnapshot Timestamp of snapshot to retrieve the difference.
- * @param {PageBlobGetPageRangesDiffOptions} [options] Options to the Page Blob Get Page Ranges Diff operation.
- * @returns {Promise} Response data for the Page Blob Get Page Range Diff operation.
- * @memberof PageBlobClient
+ * @param {AppendBlobSealOptions} [options={}]
+ * @returns {Promise}
+ * @memberof AppendBlobClient
*/
- PageBlobClient.prototype.getPageRangesDiff = function (offset, count, prevSnapshot, options) {
+ AppendBlobClient.prototype.seal = function (options) {
var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_40;
+ var _b, span, spanOptions, e_20;
return tslib.__generator(this, function (_c) {
switch (_c.label) {
case 0:
+ _b = createSpan("AppendBlobClient-seal", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
options.conditions = options.conditions || {};
- _b = createSpan("PageBlobClient-getPageRangesDiff", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
_c.label = 1;
case 1:
_c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.pageBlobContext
- .getPageRangesDiff({
+ return [4 /*yield*/, this.appendBlobContext.seal({
abortSignal: options.abortSignal,
+ appendPositionAccessConditions: options.conditions,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- prevsnapshot: prevSnapshot,
- range: rangeToString({ offset: offset, count: count }),
spanOptions: spanOptions
- })
- .then(rangeResponseFromModel)];
+ })];
case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_40 = _c.sent();
+ e_20 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_40.message
+ message: e_20.message
});
- throw e_40;
+ throw e_20;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50486,47 +50603,64 @@ var PageBlobClient = /** @class */ (function (_super) {
});
};
/**
- * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.
- * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ * Commits a new block of data to the end of the existing append blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/append-block
*
- * @param {number} offset Starting byte position of the page blob
- * @param {number} count Number of bytes to get ranges diff.
- * @param {string} prevSnapshotUrl URL of snapshot to retrieve the difference.
- * @param {PageBlobGetPageRangesDiffOptions} [options] Options to the Page Blob Get Page Ranges Diff operation.
- * @returns {Promise} Response data for the Page Blob Get Page Range Diff operation.
- * @memberof PageBlobClient
+ * @param {HttpRequestBody} body Data to be appended.
+ * @param {number} contentLength Length of the body in bytes.
+ * @param {AppendBlobAppendBlockOptions} [options] Options to the Append Block operation.
+ * @returns {Promise}
+ * @memberof AppendBlobClient
+ *
+ * Example usage:
+ *
+ * ```js
+ * const content = "Hello World!";
+ *
+ * // Create a new append blob and append data to the blob.
+ * const newAppendBlobClient = containerClient.getAppendBlobClient("");
+ * await newAppendBlobClient.create();
+ * await newAppendBlobClient.appendBlock(content, content.length);
+ *
+ * // Append data to an existing append blob.
+ * const existingAppendBlobClient = containerClient.getAppendBlobClient("");
+ * await existingAppendBlobClient.appendBlock(content, content.length);
+ * ```
*/
- PageBlobClient.prototype.getPageRangesDiffForManagedDisks = function (offset, count, prevSnapshotUrl, options) {
+ AppendBlobClient.prototype.appendBlock = function (body, contentLength, options) {
var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_41;
+ var _b, span, spanOptions, e_21;
return tslib.__generator(this, function (_c) {
switch (_c.label) {
case 0:
+ _b = createSpan("AppendBlobClient-appendBlock", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
options.conditions = options.conditions || {};
- _b = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
_c.label = 1;
case 1:
_c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.pageBlobContext
- .getPageRangesDiff({
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return [4 /*yield*/, this.appendBlobContext.appendBlock(body, contentLength, {
abortSignal: options.abortSignal,
+ appendPositionAccessConditions: options.conditions,
leaseAccessConditions: options.conditions,
modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
- prevSnapshotUrl: prevSnapshotUrl,
- range: rangeToString({ offset: offset, count: count }),
+ onUploadProgress: options.onProgress,
+ transactionalContentMD5: options.transactionalContentMD5,
+ transactionalContentCrc64: options.transactionalContentCrc64,
+ cpkInfo: options.customerProvidedKey,
+ encryptionScope: options.encryptionScope,
spanOptions: spanOptions
- })
- .then(rangeResponseFromModel)];
+ })];
case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_41 = _c.sent();
+ e_21 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_41.message
+ message: e_21.message
});
- throw e_41;
+ throw e_21;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50536,42 +50670,62 @@ var PageBlobClient = /** @class */ (function (_super) {
});
};
/**
- * Resizes the page blob to the specified size (which must be a multiple of 512).
- * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties
+ * The Append Block operation commits a new block of data to the end of an existing append blob
+ * where the contents are read from a source url.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url
*
- * @param {number} size Target size
- * @param {PageBlobResizeOptions} [options] Options to the Page Blob Resize operation.
- * @returns {Promise} Response data for the Page Blob Resize operation.
- * @memberof PageBlobClient
+ * @param {string} sourceURL
+ * The url to the blob that will be the source of the copy. A source blob in the same storage account can
+ * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob
+ * must either be public or must be authenticated via a shared access signature. If the source blob is
+ * public, no authentication is required to perform the operation.
+ * @param {number} sourceOffset Offset in source to be appended
+ * @param {number} count Number of bytes to be appended as a block
+ * @param {AppendBlobAppendBlockFromURLOptions} [options={}]
+ * @returns {Promise}
+ * @memberof AppendBlobClient
*/
- PageBlobClient.prototype.resize = function (size, options) {
+ AppendBlobClient.prototype.appendBlockFromURL = function (sourceURL, sourceOffset, count, options) {
var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_42;
+ var _b, span, spanOptions, e_22;
return tslib.__generator(this, function (_c) {
switch (_c.label) {
case 0:
+ _b = createSpan("AppendBlobClient-appendBlockFromURL", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
options.conditions = options.conditions || {};
- _b = createSpan("PageBlobClient-resize", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ options.sourceConditions = options.sourceConditions || {};
_c.label = 1;
case 1:
_c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.pageBlobContext.resize(size, {
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return [4 /*yield*/, this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, {
abortSignal: options.abortSignal,
+ sourceRange: rangeToString({ offset: sourceOffset, count: count }),
+ sourceContentMD5: options.sourceContentMD5,
+ sourceContentCrc64: options.sourceContentCrc64,
leaseAccessConditions: options.conditions,
+ appendPositionAccessConditions: options.conditions,
modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ sourceModifiedAccessConditions: {
+ sourceIfMatch: options.sourceConditions.ifMatch,
+ sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,
+ sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,
+ sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince
+ },
+ cpkInfo: options.customerProvidedKey,
encryptionScope: options.encryptionScope,
spanOptions: spanOptions
})];
case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_42 = _c.sent();
+ e_22 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_42.message
+ message: e_22.message
});
- throw e_42;
+ throw e_22;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50580,44 +50734,161 @@ var PageBlobClient = /** @class */ (function (_super) {
});
});
};
+ return AppendBlobClient;
+}(BlobClient));
+/**
+ * BlockBlobClient defines a set of operations applicable to block blobs.
+ *
+ * @export
+ * @class BlockBlobClient
+ * @extends {BlobClient}
+ */
+var BlockBlobClient = /** @class */ (function (_super) {
+ tslib.__extends(BlockBlobClient, _super);
+ function BlockBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {
+ var _this = this;
+ // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.
+ // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);
+ var pipeline;
+ var url;
+ options = options || {};
+ if (credentialOrPipelineOrContainerName instanceof Pipeline) {
+ // (url: string, pipeline: Pipeline)
+ url = urlOrConnectionString;
+ pipeline = credentialOrPipelineOrContainerName;
+ }
+ else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||
+ credentialOrPipelineOrContainerName instanceof AnonymousCredential ||
+ coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {
+ // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+ url = urlOrConnectionString;
+ options = blobNameOrOptions;
+ pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
+ }
+ else if (!credentialOrPipelineOrContainerName &&
+ typeof credentialOrPipelineOrContainerName !== "string") {
+ // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+ // The second parameter is undefined. Use anonymous credential.
+ url = urlOrConnectionString;
+ pipeline = newPipeline(new AnonymousCredential(), options);
+ }
+ else if (credentialOrPipelineOrContainerName &&
+ typeof credentialOrPipelineOrContainerName === "string" &&
+ blobNameOrOptions &&
+ typeof blobNameOrOptions === "string") {
+ // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)
+ var containerName = credentialOrPipelineOrContainerName;
+ var blobName = blobNameOrOptions;
+ var extractedCreds = extractConnectionStringParts(urlOrConnectionString);
+ if (extractedCreds.kind === "AccountConnString") {
+ {
+ var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
+ url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));
+ options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);
+ pipeline = newPipeline(sharedKeyCredential, options);
+ }
+ }
+ else if (extractedCreds.kind === "SASConnString") {
+ url =
+ appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +
+ "?" +
+ extractedCreds.accountSas;
+ pipeline = newPipeline(new AnonymousCredential(), options);
+ }
+ else {
+ throw new Error("Connection string must be either an Account connection string or a SAS connection string");
+ }
+ }
+ else {
+ throw new Error("Expecting non-empty strings for containerName and blobName parameters");
+ }
+ _this = _super.call(this, url, pipeline) || this;
+ _this.blockBlobContext = new BlockBlob(_this.storageClientContext);
+ _this._blobContext = new Blob$1(_this.storageClientContext);
+ return _this;
+ }
/**
- * Sets a page blob's sequence number.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties
+ * Creates a new BlockBlobClient object identical to the source but with the
+ * specified snapshot timestamp.
+ * Provide "" will remove the snapshot and return a URL to the base blob.
*
- * @param {SequenceNumberActionType} sequenceNumberAction Indicates how the service should modify the blob's sequence number.
- * @param {number} [sequenceNumber] Required if sequenceNumberAction is max or update
- * @param {PageBlobUpdateSequenceNumberOptions} [options] Options to the Page Blob Update Sequence Number operation.
- * @returns {Promise} Response data for the Page Blob Update Sequence Number operation.
- * @memberof PageBlobClient
+ * @param {string} snapshot The snapshot timestamp.
+ * @returns {BlockBlobClient} A new BlockBlobClient object identical to the source but with the specified snapshot timestamp.
+ * @memberof BlockBlobClient
*/
- PageBlobClient.prototype.updateSequenceNumber = function (sequenceNumberAction, sequenceNumber, options) {
+ BlockBlobClient.prototype.withSnapshot = function (snapshot) {
+ return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);
+ };
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Quick query for a JSON or CSV formatted blob.
+ *
+ * Example usage (Node.js):
+ *
+ * ```js
+ * // Query and convert a blob to a string
+ * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage");
+ * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString();
+ * console.log("Query blob content:", downloaded);
+ *
+ * async function streamToBuffer(readableStream) {
+ * return new Promise((resolve, reject) => {
+ * const chunks = [];
+ * readableStream.on("data", (data) => {
+ * chunks.push(data instanceof Buffer ? data : Buffer.from(data));
+ * });
+ * readableStream.on("end", () => {
+ * resolve(Buffer.concat(chunks));
+ * });
+ * readableStream.on("error", reject);
+ * });
+ * }
+ * ```
+ *
+ * @param {string} query
+ * @param {BlockBlobQueryOptions} [options={}]
+ * @returns {Promise}
+ * @memberof BlockBlobClient
+ */
+ BlockBlobClient.prototype.query = function (query, options) {
var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_43;
+ var _b, span, spanOptions, response, e_23;
return tslib.__generator(this, function (_c) {
switch (_c.label) {
case 0:
- options.conditions = options.conditions || {};
- _b = createSpan("PageBlobClient-updateSequenceNumber", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ _b = createSpan("BlockBlobClient-query", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
_c.label = 1;
case 1:
_c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, {
+ return [4 /*yield*/, this._blobContext.query({
abortSignal: options.abortSignal,
- blobSequenceNumber: sequenceNumber,
+ queryRequest: {
+ expression: query,
+ inputSerialization: toQuerySerialization(options.inputTextConfiguration),
+ outputSerialization: toQuerySerialization(options.outputTextConfiguration)
+ },
leaseAccessConditions: options.conditions,
modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
spanOptions: spanOptions
})];
- case 2: return [2 /*return*/, _c.sent()];
+ case 2:
+ response = _c.sent();
+ return [2 /*return*/, new BlobQueryResponse(response, {
+ abortSignal: options.abortSignal,
+ onProgress: options.onProgress,
+ onError: options.onError
+ })];
case 3:
- e_43 = _c.sent();
+ e_23 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_43.message
+ message: e_23.message
});
- throw e_43;
+ throw e_23;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50627,44 +50898,68 @@ var PageBlobClient = /** @class */ (function (_super) {
});
};
/**
- * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.
- * The snapshot is copied such that only the differential changes between the previously
- * copied snapshot are transferred to the destination.
- * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.
- * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob
- * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots
+ * Creates a new block blob, or updates the content of an existing block blob.
+ * Updating an existing block blob overwrites any existing metadata on the blob.
+ * Partial updates are not supported; the content of the existing blob is
+ * overwritten with the new content. To perform a partial update of a block blob's,
+ * use {@link stageBlock} and {@link commitBlockList}.
*
- * @param {string} copySource Specifies the name of the source page blob snapshot. For example,
- * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=
- * @param {PageBlobStartCopyIncrementalOptions} [options] Options to the Page Blob Copy Incremental operation.
- * @returns {Promise} Response data for the Page Blob Copy Incremental operation.
- * @memberof PageBlobClient
+ * This is a non-parallel uploading method, please use {@link uploadFile},
+ * {@link uploadStream} or {@link uploadBrowserData} for better performance
+ * with concurrency uploading.
+ *
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ *
+ * @param {HttpRequestBody} body Blob, string, ArrayBuffer, ArrayBufferView or a function
+ * which returns a new Readable stream whose offset is from data source beginning.
+ * @param {number} contentLength Length of body in bytes. Use Buffer.byteLength() to calculate body length for a
+ * string including non non-Base64/Hex-encoded characters.
+ * @param {BlockBlobUploadOptions} [options] Options to the Block Blob Upload operation.
+ * @returns {Promise} Response data for the Block Blob Upload operation.
+ * @memberof BlockBlobClient
+ *
+ * Example usage:
+ *
+ * ```js
+ * const content = "Hello world!";
+ * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);
+ * ```
*/
- PageBlobClient.prototype.startCopyIncremental = function (copySource, options) {
+ BlockBlobClient.prototype.upload = function (body, contentLength, options) {
var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _b, span, spanOptions, e_44;
+ var _b, span, spanOptions, e_24;
return tslib.__generator(this, function (_c) {
switch (_c.label) {
case 0:
- _b = createSpan("PageBlobClient-startCopyIncremental", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ options.conditions = options.conditions || {};
+ _b = createSpan("BlockBlobClient-upload", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
_c.label = 1;
case 1:
_c.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.pageBlobContext.copyIncremental(copySource, {
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return [4 /*yield*/, this.blockBlobContext.upload(body, contentLength, {
abortSignal: options.abortSignal,
+ blobHTTPHeaders: options.blobHTTPHeaders,
+ leaseAccessConditions: options.conditions,
+ metadata: options.metadata,
modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ onUploadProgress: options.onProgress,
+ cpkInfo: options.customerProvidedKey,
+ encryptionScope: options.encryptionScope,
+ tier: toAccessTier(options.tier),
+ blobTagsString: toBlobTagsString(options.tags),
spanOptions: spanOptions
})];
case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_44 = _c.sent();
+ e_24 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_44.message
+ message: e_24.message
});
- throw e_44;
+ throw e_24;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50673,110 +50968,48 @@ var PageBlobClient = /** @class */ (function (_super) {
});
});
};
- return PageBlobClient;
-}(BlobClient));
-/**
- * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}.
- *
- * @export
- * @class BlobLeaseClient
- */
-var BlobLeaseClient = /** @class */ (function () {
- /**
- * Creates an instance of BlobLeaseClient.
- * @param {(ContainerClient | BlobClient)} client The client to make the lease operation requests.
- * @param {string} leaseId Initial proposed lease id.
- * @memberof BlobLeaseClient
- */
- function BlobLeaseClient(client, leaseId) {
- var clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions());
- this._url = client.url;
- if (client instanceof ContainerClient) {
- this._isContainer = true;
- this._containerOrBlobOperation = new Container(clientContext);
- }
- else {
- this._isContainer = false;
- this._containerOrBlobOperation = new Blob$1(clientContext);
- }
- if (!leaseId) {
- leaseId = coreHttp.generateUuid();
- }
- this._leaseId = leaseId;
- }
- Object.defineProperty(BlobLeaseClient.prototype, "leaseId", {
- /**
- * Gets the lease Id.
- *
- * @readonly
- * @memberof BlobLeaseClient
- * @type {string}
- */
- get: function () {
- return this._leaseId;
- },
- enumerable: false,
- configurable: true
- });
- Object.defineProperty(BlobLeaseClient.prototype, "url", {
- /**
- * Gets the url.
- *
- * @readonly
- * @memberof BlobLeaseClient
- * @type {string}
- */
- get: function () {
- return this._url;
- },
- enumerable: false,
- configurable: true
- });
/**
- * Establishes and manages a lock on a container for delete operations, or on a blob
- * for write and delete operations.
- * The lock duration can be 15 to 60 seconds, or can be infinite.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
- * and
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
+ * Uploads the specified block to the block blob's "staging area" to be later
+ * committed by a call to commitBlockList.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-block
*
- * @param {number} duration Must be between 15 to 60 seconds, or infinite (-1)
- * @param {LeaseOperationOptions} [options={}] option to configure lease management operations.
- * @returns {Promise} Response data for acquire lease operation.
- * @memberof BlobLeaseClient
+ * @param {string} blockId A 64-byte value that is base64-encoded
+ * @param {HttpRequestBody} body Data to upload to the staging area.
+ * @param {number} contentLength Number of bytes to upload.
+ * @param {BlockBlobStageBlockOptions} [options] Options to the Block Blob Stage Block operation.
+ * @returns {Promise} Response data for the Block Blob Stage Block operation.
+ * @memberof BlockBlobClient
*/
- BlobLeaseClient.prototype.acquireLease = function (duration, options) {
- var _a, _b, _c, _d, _e, _f;
+ BlockBlobClient.prototype.stageBlock = function (blockId, body, contentLength, options) {
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _g, span, spanOptions, e_45;
- return tslib.__generator(this, function (_h) {
- switch (_h.label) {
+ var _a, span, spanOptions, e_25;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
case 0:
- _g = createSpan("BlobLeaseClient-acquireLease", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;
- if (this._isContainer &&
- ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
- (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
- throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
- }
- _h.label = 1;
+ _a = createSpan("BlockBlobClient-stageBlock", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
case 1:
- _h.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this._containerOrBlobOperation.acquireLease({
+ _b.trys.push([1, 3, 4, 5]);
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return [4 /*yield*/, this.blockBlobContext.stageBlock(blockId, contentLength, body, {
abortSignal: options.abortSignal,
- duration: duration,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }),
- proposedLeaseId: this._leaseId,
+ leaseAccessConditions: options.conditions,
+ onUploadProgress: options.onProgress,
+ transactionalContentMD5: options.transactionalContentMD5,
+ transactionalContentCrc64: options.transactionalContentCrc64,
+ cpkInfo: options.customerProvidedKey,
+ encryptionScope: options.encryptionScope,
spanOptions: spanOptions
})];
- case 2: return [2 /*return*/, _h.sent()];
+ case 2: return [2 /*return*/, _b.sent()];
case 3:
- e_45 = _h.sent();
+ e_25 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_45.message
+ message: e_25.message
});
- throw e_45;
+ throw e_25;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50786,49 +51019,58 @@ var BlobLeaseClient = /** @class */ (function () {
});
};
/**
- * To change the ID of the lease.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
- * and
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
+ * The Stage Block From URL operation creates a new block to be committed as part
+ * of a blob where the contents are read from a URL.
+ * This API is available starting in version 2018-03-28.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url
*
- * @param {string} proposedLeaseId the proposed new lease Id.
- * @param {LeaseOperationOptions} [options={}] option to configure lease management operations.
- * @returns {Promise} Response data for change lease operation.
- * @memberof BlobLeaseClient
- */
- BlobLeaseClient.prototype.changeLease = function (proposedLeaseId, options) {
- var _a, _b, _c, _d, _e, _f;
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _g, span, spanOptions, response, e_46;
- return tslib.__generator(this, function (_h) {
- switch (_h.label) {
- case 0:
- _g = createSpan("BlobLeaseClient-changeLease", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;
- if (this._isContainer &&
- ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
- (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
- throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
- }
- _h.label = 1;
+ * @param {string} blockId A 64-byte value that is base64-encoded
+ * @param {string} sourceURL Specifies the URL of the blob. The value
+ * may be a URL of up to 2 KB in length that specifies a blob.
+ * The value should be URL-encoded as it would appear
+ * in a request URI. The source blob must either be public
+ * or must be authenticated via a shared access signature.
+ * If the source blob is public, no authentication is required
+ * to perform the operation. Here are some examples of source object URLs:
+ * - https://myaccount.blob.core.windows.net/mycontainer/myblob
+ * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=
+ * @param {number} [offset] From which position of the blob to download, >= 0
+ * @param {number} [count] How much data to be downloaded, > 0. Will download to the end when undefined
+ * @param {BlockBlobStageBlockFromURLOptions} [options={}] Options to the Block Blob Stage Block From URL operation.
+ * @returns {Promise} Response data for the Block Blob Stage Block From URL operation.
+ * @memberof BlockBlobClient
+ */
+ BlockBlobClient.prototype.stageBlockFromURL = function (blockId, sourceURL, offset, count, options) {
+ if (offset === void 0) { offset = 0; }
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _a, span, spanOptions, e_26;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ _a = createSpan("BlockBlobClient-stageBlockFromURL", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
case 1:
- _h.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, {
+ _b.trys.push([1, 3, 4, 5]);
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return [4 /*yield*/, this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, {
abortSignal: options.abortSignal,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }),
+ leaseAccessConditions: options.conditions,
+ sourceContentMD5: options.sourceContentMD5,
+ sourceContentCrc64: options.sourceContentCrc64,
+ sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset: offset, count: count }),
+ cpkInfo: options.customerProvidedKey,
+ encryptionScope: options.encryptionScope,
spanOptions: spanOptions
})];
- case 2:
- response = _h.sent();
- this._leaseId = proposedLeaseId;
- return [2 /*return*/, response];
+ case 2: return [2 /*return*/, _b.sent()];
case 3:
- e_46 = _h.sent();
+ e_26 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_46.message
+ message: e_26.message
});
- throw e_46;
+ throw e_26;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50838,46 +51080,52 @@ var BlobLeaseClient = /** @class */ (function () {
});
};
/**
- * To free the lease if it is no longer needed so that another client may
- * immediately acquire a lease against the container or the blob.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
- * and
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
+ * Writes a blob by specifying the list of block IDs that make up the blob.
+ * In order to be written as part of a blob, a block must have been successfully written
+ * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to
+ * update a blob by uploading only those blocks that have changed, then committing the new and existing
+ * blocks together. Any blocks not specified in the block list and permanently deleted.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list
*
- * @param {LeaseOperationOptions} [options={}] option to configure lease management operations.
- * @returns {Promise} Response data for release lease operation.
- * @memberof BlobLeaseClient
+ * @param {string[]} blocks Array of 64-byte value that is base64-encoded
+ * @param {BlockBlobCommitBlockListOptions} [options] Options to the Block Blob Commit Block List operation.
+ * @returns {Promise} Response data for the Block Blob Commit Block List operation.
+ * @memberof BlockBlobClient
*/
- BlobLeaseClient.prototype.releaseLease = function (options) {
- var _a, _b, _c, _d, _e, _f;
+ BlockBlobClient.prototype.commitBlockList = function (blocks, options) {
+ var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _g, span, spanOptions, e_47;
- return tslib.__generator(this, function (_h) {
- switch (_h.label) {
+ var _b, span, spanOptions, e_27;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- _g = createSpan("BlobLeaseClient-releaseLease", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;
- if (this._isContainer &&
- ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
- (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
- throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
- }
- _h.label = 1;
+ options.conditions = options.conditions || {};
+ _b = createSpan("BlockBlobClient-commitBlockList", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
case 1:
- _h.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this._containerOrBlobOperation.releaseLease(this._leaseId, {
+ _c.trys.push([1, 3, 4, 5]);
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return [4 /*yield*/, this.blockBlobContext.commitBlockList({ latest: blocks }, {
abortSignal: options.abortSignal,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }),
+ blobHTTPHeaders: options.blobHTTPHeaders,
+ leaseAccessConditions: options.conditions,
+ metadata: options.metadata,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ cpkInfo: options.customerProvidedKey,
+ encryptionScope: options.encryptionScope,
+ tier: toAccessTier(options.tier),
+ blobTagsString: toBlobTagsString(options.tags),
spanOptions: spanOptions
})];
- case 2: return [2 /*return*/, _h.sent()];
+ case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_47 = _h.sent();
+ e_27 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_47.message
+ message: e_27.message
});
- throw e_47;
+ throw e_27;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50887,45 +51135,50 @@ var BlobLeaseClient = /** @class */ (function () {
});
};
/**
- * To renew the lease.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
- * and
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
+ * Returns the list of blocks that have been uploaded as part of a block blob
+ * using the specified block list filter.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list
*
- * @param {LeaseOperationOptions} [options={}] Optional option to configure lease management operations.
- * @returns {Promise} Response data for renew lease operation.
- * @memberof BlobLeaseClient
+ * @param {BlockListType} listType Specifies whether to return the list of committed blocks,
+ * the list of uncommitted blocks, or both lists together.
+ * @param {BlockBlobGetBlockListOptions} [options] Options to the Block Blob Get Block List operation.
+ * @returns {Promise} Response data for the Block Blob Get Block List operation.
+ * @memberof BlockBlobClient
*/
- BlobLeaseClient.prototype.renewLease = function (options) {
- var _a, _b, _c, _d, _e, _f;
+ BlockBlobClient.prototype.getBlockList = function (listType, options) {
+ var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _g, span, spanOptions, e_48;
- return tslib.__generator(this, function (_h) {
- switch (_h.label) {
+ var _b, span, spanOptions, res, e_28;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- _g = createSpan("BlobLeaseClient-renewLease", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;
- if (this._isContainer &&
- ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
- (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
- throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
- }
- _h.label = 1;
+ _b = createSpan("BlockBlobClient-getBlockList", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
case 1:
- _h.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this._containerOrBlobOperation.renewLease(this._leaseId, {
+ _c.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.blockBlobContext.getBlockList(listType, {
abortSignal: options.abortSignal,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }),
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
spanOptions: spanOptions
})];
- case 2: return [2 /*return*/, _h.sent()];
+ case 2:
+ res = _c.sent();
+ if (!res.committedBlocks) {
+ res.committedBlocks = [];
+ }
+ if (!res.uncommittedBlocks) {
+ res.uncommittedBlocks = [];
+ }
+ return [2 /*return*/, res];
case 3:
- e_48 = _h.sent();
+ e_28 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_48.message
+ message: e_28.message
});
- throw e_48;
+ throw e_28;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50934,51 +51187,45 @@ var BlobLeaseClient = /** @class */ (function () {
});
});
};
+ // High level functions
/**
- * To end the lease but ensure that another client cannot acquire a new lease
- * until the current lease period has expired.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
- * and
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
+ * ONLY AVAILABLE IN BROWSERS.
*
- * @static
- * @param {number} breakPeriod Break period
- * @param {LeaseOperationOptions} [options={}] Optional options to configure lease management operations.
- * @returns {Promise} Response data for break lease operation.
- * @memberof BlobLeaseClient
+ * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob.
+ *
+ * When buffer length <= 256MB, this method will use 1 upload call to finish the upload.
+ * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call
+ * {@link commitBlockList} to commit the block list.
+ *
+ * @export
+ * @param {Blob | ArrayBuffer | ArrayBufferView} browserData Blob, File, ArrayBuffer or ArrayBufferView
+ * @param {BlockBlobParallelUploadOptions} [options] Options to upload browser data.
+ * @returns {Promise} Response data for the Blob Upload operation.
+ * @memberof BlockBlobClient
*/
- BlobLeaseClient.prototype.breakLease = function (breakPeriod, options) {
- var _a, _b, _c, _d, _e, _f;
+ BlockBlobClient.prototype.uploadBrowserData = function (browserData, options) {
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _g, span, spanOptions, operationOptions, e_49;
- return tslib.__generator(this, function (_h) {
- switch (_h.label) {
+ var _a, span, spanOptions, browserBlob_1, e_29;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
case 0:
- _g = createSpan("BlobLeaseClient-breakLease", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;
- if (this._isContainer &&
- ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
- (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
- throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
- }
- _h.label = 1;
+ _a = createSpan("BlockBlobClient-uploadBrowserData", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
case 1:
- _h.trys.push([1, 3, 4, 5]);
- operationOptions = {
- abortSignal: options.abortSignal,
- breakPeriod: breakPeriod,
- modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }),
- spanOptions: spanOptions
- };
- return [4 /*yield*/, this._containerOrBlobOperation.breakLease(operationOptions)];
- case 2: return [2 /*return*/, _h.sent()];
+ _b.trys.push([1, 3, 4, 5]);
+ browserBlob_1 = new Blob([browserData]);
+ return [4 /*yield*/, this.uploadSeekableBlob(function (offset, size) {
+ return browserBlob_1.slice(offset, offset + size);
+ }, browserBlob_1.size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 2: return [2 /*return*/, _b.sent()];
case 3:
- e_49 = _h.sent();
+ e_29 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_49.message
+ message: e_29.message
});
- throw e_49;
+ throw e_29;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -50987,360 +51234,527 @@ var BlobLeaseClient = /** @class */ (function () {
});
});
};
- return BlobLeaseClient;
-}());
-/**
- * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs.
- *
- * @export
- * @class ContainerClient
- */
-var ContainerClient = /** @class */ (function (_super) {
- tslib.__extends(ContainerClient, _super);
- function ContainerClient(urlOrConnectionString, credentialOrPipelineOrContainerName, options) {
- var _this = this;
- var pipeline;
- var url;
- options = options || {};
- if (credentialOrPipelineOrContainerName instanceof Pipeline) {
- // (url: string, pipeline: Pipeline)
- url = urlOrConnectionString;
- pipeline = credentialOrPipelineOrContainerName;
- }
- else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||
- credentialOrPipelineOrContainerName instanceof AnonymousCredential ||
- coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {
- // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
- url = urlOrConnectionString;
- pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
- }
- else if (!credentialOrPipelineOrContainerName &&
- typeof credentialOrPipelineOrContainerName !== "string") {
- // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
- // The second parameter is undefined. Use anonymous credential.
- url = urlOrConnectionString;
- pipeline = newPipeline(new AnonymousCredential(), options);
- }
- else if (credentialOrPipelineOrContainerName &&
- typeof credentialOrPipelineOrContainerName === "string") {
- // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)
- var containerName = credentialOrPipelineOrContainerName;
- var extractedCreds = extractConnectionStringParts(urlOrConnectionString);
- if (extractedCreds.kind === "AccountConnString") {
- {
- var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
- url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName));
- options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);
- pipeline = newPipeline(sharedKeyCredential, options);
- }
- }
- else if (extractedCreds.kind === "SASConnString") {
- url =
- appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) +
- "?" +
- extractedCreds.accountSas;
- pipeline = newPipeline(new AnonymousCredential(), options);
- }
- else {
- throw new Error("Connection string must be either an Account connection string or a SAS connection string");
- }
- }
- else {
- throw new Error("Expecting non-empty strings for containerName parameter");
- }
- _this = _super.call(this, url, pipeline) || this;
- _this._containerName = _this.getContainerNameFromUrl();
- _this.containerContext = new Container(_this.storageClientContext);
- return _this;
- }
- Object.defineProperty(ContainerClient.prototype, "containerName", {
- /**
- * The name of the container.
- */
- get: function () {
- return this._containerName;
- },
- enumerable: false,
- configurable: true
- });
/**
- * Creates a new container under the specified account. If the container with
- * the same name already exists, the operation fails.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container
+ * ONLY AVAILABLE IN BROWSERS.
*
- * @param {ContainerCreateOptions} [options] Options to Container Create operation.
- * @returns {Promise}
- * @memberof ContainerClient
+ * Uploads a browser {@link Blob} object to block blob. Requires a blobFactory as the data source,
+ * which need to return a {@link Blob} object with the offset and size provided.
*
- * Example usage:
+ * When buffer length <= 256MB, this method will use 1 upload call to finish the upload.
+ * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList
+ * to commit the block list.
*
- * ```js
- * const containerClient = blobServiceClient.getContainerClient("");
- * const createContainerResponse = await containerClient.create();
- * console.log("Container was created successfully", createContainerResponse.requestId);
- * ```
+ * @param {(offset: number, size: number) => Blob} blobFactory
+ * @param {number} size size of the data to upload.
+ * @param {BlockBlobParallelUploadOptions} [options] Options to Upload to Block Blob operation.
+ * @returns {Promise} Response data for the Blob Upload operation.
+ * @memberof BlockBlobClient
*/
- ContainerClient.prototype.create = function (options) {
+ BlockBlobClient.prototype.uploadSeekableBlob = function (blobFactory, size, options) {
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_50;
+ var _a, span, spanOptions, numBlocks_1, blockList_1, blockIDPrefix_1, transferProgress_2, batch, _loop_2, i, e_30;
+ var _this = this;
return tslib.__generator(this, function (_b) {
switch (_b.label) {
case 0:
- _a = createSpan("ContainerClient-create", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ if (!options.blockSize) {
+ options.blockSize = 0;
+ }
+ if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {
+ throw new RangeError("blockSize option must be >= 0 and <= " + BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES);
+ }
+ if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) {
+ options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;
+ }
+ if (options.maxSingleShotSize < 0 ||
+ options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) {
+ throw new RangeError("maxSingleShotSize option must be >= 0 and <= " + BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES);
+ }
+ if (options.blockSize === 0) {
+ if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) {
+ throw new RangeError(size + " is too larger to upload to a block blob.");
+ }
+ if (size > options.maxSingleShotSize) {
+ options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS);
+ if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) {
+ options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;
+ }
+ }
+ }
+ if (!options.blobHTTPHeaders) {
+ options.blobHTTPHeaders = {};
+ }
+ if (!options.conditions) {
+ options.conditions = {};
+ }
+ _a = createSpan("BlockBlobClient-UploadSeekableBlob", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
_b.label = 1;
case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.containerContext.create(tslib.__assign(tslib.__assign({}, options), { spanOptions: spanOptions }))];
- case 2:
- // Spread operator in destructuring assignments,
- // this will filter out unwanted properties from the response object into result object
- return [2 /*return*/, _b.sent()];
+ _b.trys.push([1, 5, 6, 7]);
+ if (!(size <= options.maxSingleShotSize)) return [3 /*break*/, 3];
+ return [4 /*yield*/, this.upload(blobFactory(0, size), size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 2: return [2 /*return*/, _b.sent()];
case 3:
- e_50 = _b.sent();
+ numBlocks_1 = Math.floor((size - 1) / options.blockSize) + 1;
+ if (numBlocks_1 > BLOCK_BLOB_MAX_BLOCKS) {
+ throw new RangeError("The buffer's size is too big or the BlockSize is too small;" +
+ ("the number of blocks must be <= " + BLOCK_BLOB_MAX_BLOCKS));
+ }
+ blockList_1 = [];
+ blockIDPrefix_1 = coreHttp.generateUuid();
+ transferProgress_2 = 0;
+ batch = new Batch(options.concurrency);
+ _loop_2 = function (i) {
+ batch.addOperation(function () { return tslib.__awaiter(_this, void 0, void 0, function () {
+ var blockID, start, end, contentLength;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ blockID = generateBlockID(blockIDPrefix_1, i);
+ start = options.blockSize * i;
+ end = i === numBlocks_1 - 1 ? size : start + options.blockSize;
+ contentLength = end - start;
+ blockList_1.push(blockID);
+ return [4 /*yield*/, this.stageBlock(blockID, blobFactory(start, contentLength), contentLength, {
+ abortSignal: options.abortSignal,
+ conditions: options.conditions,
+ encryptionScope: options.encryptionScope,
+ tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })
+ })];
+ case 1:
+ _a.sent();
+ // Update progress after block is successfully uploaded to server, in case of block trying
+ // TODO: Hook with convenience layer progress event in finer level
+ transferProgress_2 += contentLength;
+ if (options.onProgress) {
+ options.onProgress({
+ loadedBytes: transferProgress_2
+ });
+ }
+ return [2 /*return*/];
+ }
+ });
+ }); });
+ };
+ for (i = 0; i < numBlocks_1; i++) {
+ _loop_2(i);
+ }
+ return [4 /*yield*/, batch.do()];
+ case 4:
+ _b.sent();
+ return [2 /*return*/, this.commitBlockList(blockList_1, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 5:
+ e_30 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_50.message
+ message: e_30.message
});
- throw e_50;
- case 4:
+ throw e_30;
+ case 6:
span.end();
return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
+ case 7: return [2 /*return*/];
}
});
});
};
/**
- * Creates a new container under the specified account. If the container with
- * the same name already exists, it is not changed.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
*
- * @param {ContainerCreateOptions} [options]
- * @returns {Promise}
- * @memberof ContainerClient
+ * Uploads a local file in blocks to a block blob.
+ *
+ * When file size <= 256MB, this method will use 1 upload call to finish the upload.
+ * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList
+ * to commit the block list.
+ *
+ * @param {string} filePath Full path of local file
+ * @param {BlockBlobParallelUploadOptions} [options] Options to Upload to Block Blob operation.
+ * @returns {(Promise)} Response data for the Blob Upload operation.
+ * @memberof BlockBlobClient
*/
- ContainerClient.prototype.createIfNotExists = function (options) {
- var _a, _b;
+ BlockBlobClient.prototype.uploadFile = function (filePath, options) {
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _c, span, spanOptions, res, e_51;
- return tslib.__generator(this, function (_d) {
- switch (_d.label) {
+ var _a, span, spanOptions, size, e_31;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
case 0:
- _c = createSpan("ContainerClient-createIfNotExists", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;
- _d.label = 1;
+ _a = createSpan("BlockBlobClient-uploadFile", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
case 1:
- _d.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.create(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ _b.trys.push([1, 4, 5, 6]);
+ return [4 /*yield*/, fsStat(filePath)];
case 2:
- res = _d.sent();
- return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable
- })];
- case 3:
- e_51 = _d.sent();
- if (((_a = e_51.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") {
- span.setStatus({
- code: api.CanonicalCode.ALREADY_EXISTS,
- message: "Expected exception when creating a container only if it does not already exist."
- });
- return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_51.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_51.response })];
- }
+ size = (_b.sent()).size;
+ return [4 /*yield*/, this.uploadResetableStream(function (offset, count) {
+ return fsCreateReadStream(filePath, {
+ autoClose: true,
+ end: count ? offset + count - 1 : Infinity,
+ start: offset
+ });
+ }, size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 3: return [2 /*return*/, _b.sent()];
+ case 4:
+ e_31 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_51.message
+ message: e_31.message
});
- throw e_51;
- case 4:
+ throw e_31;
+ case 5:
span.end();
return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
+ case 6: return [2 /*return*/];
}
});
});
};
/**
- * Returns true if the Azure container resource represented by this client exists; false otherwise.
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
*
- * NOTE: use this function with care since an existing container might be deleted by other clients or
- * applications. Vice versa new containers with the same name might be added by other clients or
- * applications after this function completes.
+ * Uploads a Node.js Readable stream into block blob.
*
- * @param {ContainerExistsOptions} [options={}]
- * @returns {Promise}
- * @memberof ContainerClient
- */
- ContainerClient.prototype.exists = function (options) {
+ * PERFORMANCE IMPROVEMENT TIPS:
+ * * Input stream highWaterMark is better to set a same value with bufferSize
+ * parameter, which will avoid Buffer.concat() operations.
+ *
+ * @param {Readable} stream Node.js Readable stream
+ * @param {number} bufferSize Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB
+ * @param {number} maxConcurrency Max concurrency indicates the max number of buffers that can be allocated,
+ * positive correlation with max uploading concurrency. Default value is 5
+ * @param {BlockBlobUploadStreamOptions} [options] Options to Upload Stream to Block Blob operation.
+ * @returns {Promise} Response data for the Blob Upload operation.
+ * @memberof BlockBlobClient
+ */
+ BlockBlobClient.prototype.uploadStream = function (stream, bufferSize, maxConcurrency, options) {
+ if (bufferSize === void 0) { bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES; }
+ if (maxConcurrency === void 0) { maxConcurrency = 5; }
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_52;
+ var _a, span, spanOptions, blockNum_1, blockIDPrefix_2, transferProgress_3, blockList_2, scheduler, e_32;
+ var _this = this;
return tslib.__generator(this, function (_b) {
switch (_b.label) {
case 0:
- _a = createSpan("ContainerClient-exists", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ if (!options.blobHTTPHeaders) {
+ options.blobHTTPHeaders = {};
+ }
+ if (!options.conditions) {
+ options.conditions = {};
+ }
+ _a = createSpan("BlockBlobClient-uploadStream", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
_b.label = 1;
case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.getProperties({
- abortSignal: options.abortSignal,
- tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })
- })];
+ _b.trys.push([1, 4, 5, 6]);
+ blockNum_1 = 0;
+ blockIDPrefix_2 = coreHttp.generateUuid();
+ transferProgress_3 = 0;
+ blockList_2 = [];
+ scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, function (body, length) { return tslib.__awaiter(_this, void 0, void 0, function () {
+ var blockID;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ blockID = generateBlockID(blockIDPrefix_2, blockNum_1);
+ blockList_2.push(blockID);
+ blockNum_1++;
+ return [4 /*yield*/, this.stageBlock(blockID, body, length, {
+ conditions: options.conditions,
+ encryptionScope: options.encryptionScope,
+ tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })
+ })];
+ case 1:
+ _a.sent();
+ // Update progress after block is successfully uploaded to server, in case of block trying
+ transferProgress_3 += length;
+ if (options.onProgress) {
+ options.onProgress({ loadedBytes: transferProgress_3 });
+ }
+ return [2 /*return*/];
+ }
+ });
+ }); },
+ // concurrency should set a smaller value than maxConcurrency, which is helpful to
+ // reduce the possibility when a outgoing handler waits for stream data, in
+ // this situation, outgoing handlers are blocked.
+ // Outgoing queue shouldn't be empty.
+ Math.ceil((maxConcurrency / 4) * 3));
+ return [4 /*yield*/, scheduler.do()];
case 2:
_b.sent();
- return [2 /*return*/, true];
- case 3:
- e_52 = _b.sent();
- if (e_52.statusCode === 404) {
- span.setStatus({
- code: api.CanonicalCode.NOT_FOUND,
- message: "Expected exception when checking container existence"
- });
- return [2 /*return*/, false];
- }
+ return [4 /*yield*/, this.commitBlockList(blockList_2, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 3: return [2 /*return*/, _b.sent()];
+ case 4:
+ e_32 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_52.message
+ message: e_32.message
});
- throw e_52;
- case 4:
+ throw e_32;
+ case 5:
span.end();
return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
+ case 6: return [2 /*return*/];
}
});
});
};
/**
- * Creates a {@link BlobClient}
- *
- * @param {string} blobName A blob name
- * @returns {BlobClient} A new BlobClient object for the given blob name.
- * @memberof ContainerClient
- */
- ContainerClient.prototype.getBlobClient = function (blobName) {
- return new BlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);
- };
- /**
- * Creates an {@link AppendBlobClient}
- *
- * @param {string} blobName An append blob name
- * @returns {AppendBlobClient}
- * @memberof ContainerClient
- */
- ContainerClient.prototype.getAppendBlobClient = function (blobName) {
- return new AppendBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);
- };
- /**
- * Creates a {@link BlockBlobClient}
- *
- * @param {string} blobName A block blob name
- * @returns {BlockBlobClient}
- * @memberof ContainerClient
- *
- * Example usage:
- *
- * ```js
- * const content = "Hello world!";
- *
- * const blockBlobClient = containerClient.getBlockBlobClient("");
- * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);
- * ```
- */
- ContainerClient.prototype.getBlockBlobClient = function (blobName) {
- return new BlockBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);
- };
- /**
- * Creates a {@link PageBlobClient}
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
*
- * @param {string} blobName A page blob name
- * @returns {PageBlobClient}
- * @memberof ContainerClient
- */
- ContainerClient.prototype.getPageBlobClient = function (blobName) {
- return new PageBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);
- };
- /**
- * Returns all user-defined metadata and system properties for the specified
- * container. The data returned does not include the container's list of blobs.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties
+ * Accepts a Node.js Readable stream factory, and uploads in blocks to a block blob.
+ * The Readable stream factory must returns a Node.js Readable stream starting from the offset defined. The offset
+ * is the offset in the block blob to be uploaded.
*
- * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if
- * they originally contained uppercase characters. This differs from the metadata keys returned by
- * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which
- * will retain their original casing.
+ * When buffer length <= 256MB, this method will use 1 upload call to finish the upload.
+ * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}
+ * to commit the block list.
*
- * @param {ContainerGetPropertiesOptions} [options] Options to Container Get Properties operation.
- * @returns {Promise}
- * @memberof ContainerClient
+ * @export
+ * @param {(offset: number) => NodeJS.ReadableStream} streamFactory Returns a Node.js Readable stream starting
+ * from the offset defined
+ * @param {number} size Size of the block blob
+ * @param {BlockBlobParallelUploadOptions} [options] Options to Upload to Block Blob operation.
+ * @returns {(Promise)} Response data for the Blob Upload operation.
+ * @memberof BlockBlobClient
*/
- ContainerClient.prototype.getProperties = function (options) {
+ BlockBlobClient.prototype.uploadResetableStream = function (streamFactory, size, options) {
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_53;
+ var _a, span, spanOptions, numBlocks_2, blockList_3, blockIDPrefix_3, transferProgress_4, batch, _loop_3, i, e_33;
+ var _this = this;
return tslib.__generator(this, function (_b) {
switch (_b.label) {
case 0:
+ if (!options.blockSize) {
+ options.blockSize = 0;
+ }
+ if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {
+ throw new RangeError("blockSize option must be >= 0 and <= " + BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES);
+ }
+ if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) {
+ options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;
+ }
+ if (options.maxSingleShotSize < 0 ||
+ options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) {
+ throw new RangeError("maxSingleShotSize option must be >= 0 and <= " + BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES);
+ }
+ if (options.blockSize === 0) {
+ if (size > BLOCK_BLOB_MAX_BLOCKS * BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {
+ throw new RangeError(size + " is too larger to upload to a block blob.");
+ }
+ if (size > options.maxSingleShotSize) {
+ options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS);
+ if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) {
+ options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;
+ }
+ }
+ }
+ if (!options.blobHTTPHeaders) {
+ options.blobHTTPHeaders = {};
+ }
if (!options.conditions) {
options.conditions = {};
}
- _a = createSpan("ContainerClient-getProperties", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _a = createSpan("BlockBlobClient-uploadResetableStream", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
_b.label = 1;
case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.containerContext.getProperties(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal }, options.conditions), { spanOptions: spanOptions }))];
+ _b.trys.push([1, 6, 7, 8]);
+ if (!(size <= options.maxSingleShotSize)) return [3 /*break*/, 3];
+ return [4 /*yield*/, this.upload(function () { return streamFactory(0); }, size, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
case 2: return [2 /*return*/, _b.sent()];
case 3:
- e_53 = _b.sent();
+ numBlocks_2 = Math.floor((size - 1) / options.blockSize) + 1;
+ if (numBlocks_2 > BLOCK_BLOB_MAX_BLOCKS) {
+ throw new RangeError("The buffer's size is too big or the BlockSize is too small;" +
+ ("the number of blocks must be <= " + BLOCK_BLOB_MAX_BLOCKS));
+ }
+ blockList_3 = [];
+ blockIDPrefix_3 = coreHttp.generateUuid();
+ transferProgress_4 = 0;
+ batch = new Batch(options.concurrency);
+ _loop_3 = function (i) {
+ batch.addOperation(function () { return tslib.__awaiter(_this, void 0, void 0, function () {
+ var blockID, start, end, contentLength;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ blockID = generateBlockID(blockIDPrefix_3, i);
+ start = options.blockSize * i;
+ end = i === numBlocks_2 - 1 ? size : start + options.blockSize;
+ contentLength = end - start;
+ blockList_3.push(blockID);
+ return [4 /*yield*/, this.stageBlock(blockID, function () { return streamFactory(start, contentLength); }, contentLength, {
+ abortSignal: options.abortSignal,
+ conditions: options.conditions,
+ encryptionScope: options.encryptionScope,
+ tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })
+ })];
+ case 1:
+ _a.sent();
+ // Update progress after block is successfully uploaded to server, in case of block trying
+ transferProgress_4 += contentLength;
+ if (options.onProgress) {
+ options.onProgress({ loadedBytes: transferProgress_4 });
+ }
+ return [2 /*return*/];
+ }
+ });
+ }); });
+ };
+ for (i = 0; i < numBlocks_2; i++) {
+ _loop_3(i);
+ }
+ return [4 /*yield*/, batch.do()];
+ case 4:
+ _b.sent();
+ return [4 /*yield*/, this.commitBlockList(blockList_3, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 5: return [2 /*return*/, _b.sent()];
+ case 6:
+ e_33 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_53.message
+ message: e_33.message
});
- throw e_53;
- case 4:
+ throw e_33;
+ case 7:
span.end();
return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
+ case 8: return [2 /*return*/];
}
});
});
};
+ return BlockBlobClient;
+}(BlobClient));
+/**
+ * PageBlobClient defines a set of operations applicable to page blobs.
+ *
+ * @export
+ * @class PageBlobClient
+ * @extends {BlobClient}
+ */
+var PageBlobClient = /** @class */ (function (_super) {
+ tslib.__extends(PageBlobClient, _super);
+ function PageBlobClient(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {
+ var _this = this;
+ // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.
+ // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);
+ var pipeline;
+ var url;
+ options = options || {};
+ if (credentialOrPipelineOrContainerName instanceof Pipeline) {
+ // (url: string, pipeline: Pipeline)
+ url = urlOrConnectionString;
+ pipeline = credentialOrPipelineOrContainerName;
+ }
+ else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||
+ credentialOrPipelineOrContainerName instanceof AnonymousCredential ||
+ coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {
+ // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+ url = urlOrConnectionString;
+ options = blobNameOrOptions;
+ pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
+ }
+ else if (!credentialOrPipelineOrContainerName &&
+ typeof credentialOrPipelineOrContainerName !== "string") {
+ // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+ // The second parameter is undefined. Use anonymous credential.
+ url = urlOrConnectionString;
+ pipeline = newPipeline(new AnonymousCredential(), options);
+ }
+ else if (credentialOrPipelineOrContainerName &&
+ typeof credentialOrPipelineOrContainerName === "string" &&
+ blobNameOrOptions &&
+ typeof blobNameOrOptions === "string") {
+ // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)
+ var containerName = credentialOrPipelineOrContainerName;
+ var blobName = blobNameOrOptions;
+ var extractedCreds = extractConnectionStringParts(urlOrConnectionString);
+ if (extractedCreds.kind === "AccountConnString") {
+ {
+ var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
+ url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));
+ options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);
+ pipeline = newPipeline(sharedKeyCredential, options);
+ }
+ }
+ else if (extractedCreds.kind === "SASConnString") {
+ url =
+ appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) +
+ "?" +
+ extractedCreds.accountSas;
+ pipeline = newPipeline(new AnonymousCredential(), options);
+ }
+ else {
+ throw new Error("Connection string must be either an Account connection string or a SAS connection string");
+ }
+ }
+ else {
+ throw new Error("Expecting non-empty strings for containerName and blobName parameters");
+ }
+ _this = _super.call(this, url, pipeline) || this;
+ _this.pageBlobContext = new PageBlob(_this.storageClientContext);
+ return _this;
+ }
/**
- * Marks the specified container for deletion. The container and any blobs
- * contained within it are later deleted during garbage collection.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container
+ * Creates a new PageBlobClient object identical to the source but with the
+ * specified snapshot timestamp.
+ * Provide "" will remove the snapshot and return a Client to the base blob.
*
- * @param {ContainerDeleteMethodOptions} [options] Options to Container Delete operation.
- * @returns {Promise}
- * @memberof ContainerClient
+ * @param {string} snapshot The snapshot timestamp.
+ * @returns {PageBlobClient} A new PageBlobClient object identical to the source but with the specified snapshot timestamp.
+ * @memberof PageBlobClient
*/
- ContainerClient.prototype.delete = function (options) {
+ PageBlobClient.prototype.withSnapshot = function (snapshot) {
+ return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline);
+ };
+ /**
+ * Creates a page blob of the specified length. Call uploadPages to upload data
+ * data to a page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ *
+ * @param {number} size size of the page blob.
+ * @param {PageBlobCreateOptions} [options] Options to the Page Blob Create operation.
+ * @returns {Promise} Response data for the Page Blob Create operation.
+ * @memberof PageBlobClient
+ */
+ PageBlobClient.prototype.create = function (size, options) {
+ var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_54;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
+ var _b, span, spanOptions, e_34;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- if (!options.conditions) {
- options.conditions = {};
- }
- _a = createSpan("ContainerClient-delete", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
+ options.conditions = options.conditions || {};
+ _b = createSpan("PageBlobClient-create", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.containerContext.deleteMethod({
+ _c.trys.push([1, 3, 4, 5]);
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return [4 /*yield*/, this.pageBlobContext.create(0, size, {
abortSignal: options.abortSignal,
+ blobHTTPHeaders: options.blobHTTPHeaders,
+ blobSequenceNumber: options.blobSequenceNumber,
leaseAccessConditions: options.conditions,
- modifiedAccessConditions: options.conditions,
+ metadata: options.metadata,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ cpkInfo: options.customerProvidedKey,
+ encryptionScope: options.encryptionScope,
+ tier: toAccessTier(options.tier),
+ blobTagsString: toBlobTagsString(options.tags),
spanOptions: spanOptions
})];
- case 2: return [2 /*return*/, _b.sent()];
+ case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_54 = _b.sent();
+ e_34 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_54.message
+ message: e_34.message
});
- throw e_54;
+ throw e_34;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -51350,45 +51764,48 @@ var ContainerClient = /** @class */ (function (_super) {
});
};
/**
- * Marks the specified container for deletion if it exists. The container and any blobs
- * contained within it are later deleted during garbage collection.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container
+ * Creates a page blob of the specified length. Call uploadPages to upload data
+ * data to a page blob. If the blob with the same name already exists, the content
+ * of the existing blob will remain unchanged.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
*
- * @param {ContainerDeleteMethodOptions} [options] Options to Container Delete operation.
- * @returns {Promise}
- * @memberof ContainerClient
+ * @param {number} size size of the page blob.
+ * @param {PageBlobCreateIfNotExistsOptions} [options]
+ * @returns {Promise}
+ * @memberof PageBlobClient
*/
- ContainerClient.prototype.deleteIfExists = function (options) {
+ PageBlobClient.prototype.createIfNotExists = function (size, options) {
var _a, _b;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _c, span, spanOptions, res, e_55;
+ var _c, span, spanOptions, conditions, res, e_35;
return tslib.__generator(this, function (_d) {
switch (_d.label) {
case 0:
- _c = createSpan("ContainerClient-deleteIfExists", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;
+ _c = createSpan("PageBlobClient-createIfNotExists", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;
_d.label = 1;
case 1:
_d.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ conditions = { ifNoneMatch: ETagAny };
+ return [4 /*yield*/, this.create(size, tslib.__assign(tslib.__assign({}, options), { conditions: conditions, tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
case 2:
res = _d.sent();
return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable
})];
case 3:
- e_55 = _d.sent();
- if (((_a = e_55.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") {
+ e_35 = _d.sent();
+ if (((_a = e_35.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") {
span.setStatus({
- code: api.CanonicalCode.NOT_FOUND,
- message: "Expected exception when deleting a container only if it exists."
+ code: api.CanonicalCode.ALREADY_EXISTS,
+ message: "Expected exception when creating a blob only if it does not already exist."
});
- return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_55.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_55.response })];
+ return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_35.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_35.response })];
}
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_55.message
+ message: e_35.message
});
- throw e_55;
+ throw e_35;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -51398,51 +51815,51 @@ var ContainerClient = /** @class */ (function (_super) {
});
};
/**
- * Sets one or more user-defined name-value pairs for the specified container.
- *
- * If no option provided, or no metadata defined in the parameter, the container
- * metadata will be removed.
- *
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata
+ * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-page
*
- * @param {Metadata} [metadata] Replace existing metadata with this value.
- * If no value provided the existing metadata will be removed.
- * @param {ContainerSetMetadataOptions} [options] Options to Container Set Metadata operation.
- * @returns {Promise}
- * @memberof ContainerClient
+ * @param {HttpRequestBody} body Data to upload
+ * @param {number} offset Offset of destination page blob
+ * @param {number} count Content length of the body, also number of bytes to be uploaded
+ * @param {PageBlobUploadPagesOptions} [options] Options to the Page Blob Upload Pages operation.
+ * @returns {Promise} Response data for the Page Blob Upload Pages operation.
+ * @memberof PageBlobClient
*/
- ContainerClient.prototype.setMetadata = function (metadata, options) {
+ PageBlobClient.prototype.uploadPages = function (body, offset, count, options) {
+ var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_56;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
+ var _b, span, spanOptions, e_36;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- if (!options.conditions) {
- options.conditions = {};
- }
- if (options.conditions.ifUnmodifiedSince) {
- throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service");
- }
- _a = createSpan("ContainerClient-setMetadata", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
+ options.conditions = options.conditions || {};
+ _b = createSpan("PageBlobClient-uploadPages", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.containerContext.setMetadata({
+ _c.trys.push([1, 3, 4, 5]);
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return [4 /*yield*/, this.pageBlobContext.uploadPages(body, count, {
abortSignal: options.abortSignal,
leaseAccessConditions: options.conditions,
- metadata: metadata,
- modifiedAccessConditions: options.conditions,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ onUploadProgress: options.onProgress,
+ range: rangeToString({ offset: offset, count: count }),
+ sequenceNumberAccessConditions: options.conditions,
+ transactionalContentMD5: options.transactionalContentMD5,
+ transactionalContentCrc64: options.transactionalContentCrc64,
+ cpkInfo: options.customerProvidedKey,
+ encryptionScope: options.encryptionScope,
spanOptions: spanOptions
})];
- case 2: return [2 /*return*/, _b.sent()];
+ case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_56 = _b.sent();
+ e_36 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_56.message
+ message: e_36.message
});
- throw e_56;
+ throw e_36;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -51452,78 +51869,58 @@ var ContainerClient = /** @class */ (function (_super) {
});
};
/**
- * Gets the permissions for the specified container. The permissions indicate
- * whether container data may be accessed publicly.
- *
- * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings.
- * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z".
- *
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl
+ * The Upload Pages operation writes a range of pages to a page blob where the
+ * contents are read from a URL.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url
*
- * @param {ContainerGetAccessPolicyOptions} [options] Options to Container Get Access Policy operation.
- * @returns {Promise}
- * @memberof ContainerClient
+ * @param {string} sourceURL Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication
+ * @param {number} sourceOffset The source offset to copy from. Pass 0 to copy from the beginning of source page blob
+ * @param {number} destOffset Offset of destination page blob
+ * @param {number} count Number of bytes to be uploaded from source page blob
+ * @param {PageBlobUploadPagesFromURLOptions} [options={}]
+ * @returns {Promise}
+ * @memberof PageBlobClient
*/
- ContainerClient.prototype.getAccessPolicy = function (options) {
+ PageBlobClient.prototype.uploadPagesFromURL = function (sourceURL, sourceOffset, destOffset, count, options) {
+ var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, response, res, _i, response_1, identifier, accessPolicy, e_57;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
+ var _b, span, spanOptions, e_37;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- if (!options.conditions) {
- options.conditions = {};
- }
- _a = createSpan("ContainerClient-getAccessPolicy", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
+ options.conditions = options.conditions || {};
+ options.sourceConditions = options.sourceConditions || {};
+ _b = createSpan("PageBlobClient-uploadPagesFromURL", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.containerContext.getAccessPolicy({
+ _c.trys.push([1, 3, 4, 5]);
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return [4 /*yield*/, this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count: count }), 0, rangeToString({ offset: destOffset, count: count }), {
abortSignal: options.abortSignal,
+ sourceContentMD5: options.sourceContentMD5,
+ sourceContentCrc64: options.sourceContentCrc64,
leaseAccessConditions: options.conditions,
+ sequenceNumberAccessConditions: options.conditions,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ sourceModifiedAccessConditions: {
+ sourceIfMatch: options.sourceConditions.ifMatch,
+ sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,
+ sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,
+ sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince
+ },
+ cpkInfo: options.customerProvidedKey,
+ encryptionScope: options.encryptionScope,
spanOptions: spanOptions
})];
- case 2:
- response = _b.sent();
- res = {
- _response: response._response,
- blobPublicAccess: response.blobPublicAccess,
- date: response.date,
- etag: response.etag,
- errorCode: response.errorCode,
- lastModified: response.lastModified,
- requestId: response.requestId,
- clientRequestId: response.clientRequestId,
- signedIdentifiers: [],
- version: response.version
- };
- for (_i = 0, response_1 = response; _i < response_1.length; _i++) {
- identifier = response_1[_i];
- accessPolicy = undefined;
- if (identifier.accessPolicy) {
- accessPolicy = {
- permissions: identifier.accessPolicy.permissions
- };
- if (identifier.accessPolicy.expiresOn) {
- accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn);
- }
- if (identifier.accessPolicy.startsOn) {
- accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn);
- }
- }
- res.signedIdentifiers.push({
- accessPolicy: accessPolicy,
- id: identifier.id
- });
- }
- return [2 /*return*/, res];
+ case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_57 = _b.sent();
+ e_37 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_57.message
+ message: e_37.message
});
- throw e_57;
+ throw e_37;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -51533,68 +51930,47 @@ var ContainerClient = /** @class */ (function (_super) {
});
};
/**
- * Sets the permissions for the specified container. The permissions indicate
- * whether blobs in a container may be accessed publicly.
- *
- * When you set permissions for a container, the existing permissions are replaced.
- * If no access or containerAcl provided, the existing container ACL will be
- * removed.
- *
- * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect.
- * During this interval, a shared access signature that is associated with the stored access policy will
- * fail with status code 403 (Forbidden), until the access policy becomes active.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl
+ * Frees the specified pages from the page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-page
*
- * @param {PublicAccessType} [access] The level of public access to data in the container.
- * @param {SignedIdentifier[]} [containerAcl] Array of elements each having a unique Id and details of the access policy.
- * @param {ContainerSetAccessPolicyOptions} [options] Options to Container Set Access Policy operation.
- * @returns {Promise}
- * @memberof ContainerClient
+ * @param {number} [offset] Starting byte position of the pages to clear.
+ * @param {number} [count] Number of bytes to clear.
+ * @param {PageBlobClearPagesOptions} [options] Options to the Page Blob Clear Pages operation.
+ * @returns {Promise} Response data for the Page Blob Clear Pages operation.
+ * @memberof PageBlobClient
*/
- ContainerClient.prototype.setAccessPolicy = function (access, containerAcl, options) {
+ PageBlobClient.prototype.clearPages = function (offset, count, options) {
+ var _a;
+ if (offset === void 0) { offset = 0; }
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, acl, _i, _b, identifier, e_58;
+ var _b, span, spanOptions, e_38;
return tslib.__generator(this, function (_c) {
switch (_c.label) {
case 0:
options.conditions = options.conditions || {};
- _a = createSpan("ContainerClient-setAccessPolicy", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b = createSpan("PageBlobClient-clearPages", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
_c.label = 1;
case 1:
_c.trys.push([1, 3, 4, 5]);
- acl = [];
- for (_i = 0, _b = containerAcl || []; _i < _b.length; _i++) {
- identifier = _b[_i];
- acl.push({
- accessPolicy: {
- expiresOn: identifier.accessPolicy.expiresOn
- ? truncatedISO8061Date(identifier.accessPolicy.expiresOn)
- : "",
- permissions: identifier.accessPolicy.permissions,
- startsOn: identifier.accessPolicy.startsOn
- ? truncatedISO8061Date(identifier.accessPolicy.startsOn)
- : ""
- },
- id: identifier.id
- });
- }
- return [4 /*yield*/, this.containerContext.setAccessPolicy({
+ return [4 /*yield*/, this.pageBlobContext.clearPages(0, {
abortSignal: options.abortSignal,
- access: access,
- containerAcl: acl,
leaseAccessConditions: options.conditions,
- modifiedAccessConditions: options.conditions,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ range: rangeToString({ offset: offset, count: count }),
+ sequenceNumberAccessConditions: options.conditions,
+ cpkInfo: options.customerProvidedKey,
+ encryptionScope: options.encryptionScope,
spanOptions: spanOptions
})];
case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_58 = _c.sent();
+ e_38 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_58.message
+ message: e_38.message
});
- throw e_58;
+ throw e_38;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -51604,64 +51980,46 @@ var ContainerClient = /** @class */ (function (_super) {
});
};
/**
- * Get a {@link BlobLeaseClient} that manages leases on the container.
- *
- * @param {string} [proposeLeaseId] Initial proposed lease Id.
- * @returns {BlobLeaseClient} A new BlobLeaseClient object for managing leases on the container.
- * @memberof ContainerClient
- */
- ContainerClient.prototype.getBlobLeaseClient = function (proposeLeaseId) {
- return new BlobLeaseClient(this, proposeLeaseId);
- };
- /**
- * Creates a new block blob, or updates the content of an existing block blob.
- *
- * Updating an existing block blob overwrites any existing metadata on the blob.
- * Partial updates are not supported; the content of the existing blob is
- * overwritten with the new content. To perform a partial update of a block blob's,
- * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}.
- *
- * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile},
- * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better
- * performance with concurrency uploading.
- *
- * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ * Returns the list of valid page ranges for a page blob or snapshot of a page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
*
- * @param {string} blobName Name of the block blob to create or update.
- * @param {HttpRequestBody} body Blob, string, ArrayBuffer, ArrayBufferView or a function
- * which returns a new Readable stream whose offset is from data source beginning.
- * @param {number} contentLength Length of body in bytes. Use Buffer.byteLength() to calculate body length for a
- * string including non non-Base64/Hex-encoded characters.
- * @param {BlockBlobUploadOptions} [options] Options to configure the Block Blob Upload operation.
- * @returns {Promise<{ blockBlobClient: BlockBlobClient; response: BlockBlobUploadResponse }>} Block Blob upload response data and the corresponding BlockBlobClient instance.
- * @memberof ContainerClient
+ * @param {number} [offset] Starting byte position of the page ranges.
+ * @param {number} [count] Number of bytes to get.
+ * @param {PageBlobGetPageRangesOptions} [options] Options to the Page Blob Get Ranges operation.
+ * @returns {Promise} Response data for the Page Blob Get Ranges operation.
+ * @memberof PageBlobClient
*/
- ContainerClient.prototype.uploadBlockBlob = function (blobName, body, contentLength, options) {
+ PageBlobClient.prototype.getPageRanges = function (offset, count, options) {
+ var _a;
+ if (offset === void 0) { offset = 0; }
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, blockBlobClient, response, e_59;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
+ var _b, span, spanOptions, e_39;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- _a = createSpan("ContainerClient-uploadBlockBlob", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
+ options.conditions = options.conditions || {};
+ _b = createSpan("PageBlobClient-getPageRanges", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
case 1:
- _b.trys.push([1, 3, 4, 5]);
- blockBlobClient = this.getBlockBlobClient(blobName);
- return [4 /*yield*/, blockBlobClient.upload(body, contentLength, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 2:
- response = _b.sent();
- return [2 /*return*/, {
- blockBlobClient: blockBlobClient,
- response: response
- }];
+ _c.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.pageBlobContext
+ .getPageRanges({
+ abortSignal: options.abortSignal,
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ range: rangeToString({ offset: offset, count: count }),
+ spanOptions: spanOptions
+ })
+ .then(rangeResponseFromModel)];
+ case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_59 = _b.sent();
+ e_39 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_59.message
+ message: e_39.message
});
- throw e_59;
+ throw e_39;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -51671,41 +52029,47 @@ var ContainerClient = /** @class */ (function (_super) {
});
};
/**
- * Marks the specified blob or snapshot for deletion. The blob is later deleted
- * during garbage collection. Note that in order to delete a blob, you must delete
- * all of its snapshots. You can delete both at the same time with the Delete
- * Blob operation.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
*
- * @param {string} blobName
- * @param {ContainerDeleteBlobOptions} [options] Options to Blob Delete operation.
- * @returns {Promise} Block blob deletion response data.
- * @memberof ContainerClient
+ * @param {number} offset Starting byte position of the page blob
+ * @param {number} count Number of bytes to get ranges diff.
+ * @param {string} prevSnapshot Timestamp of snapshot to retrieve the difference.
+ * @param {PageBlobGetPageRangesDiffOptions} [options] Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns {Promise} Response data for the Page Blob Get Page Range Diff operation.
+ * @memberof PageBlobClient
*/
- ContainerClient.prototype.deleteBlob = function (blobName, options) {
+ PageBlobClient.prototype.getPageRangesDiff = function (offset, count, prevSnapshot, options) {
+ var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, blobClient, e_60;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
+ var _b, span, spanOptions, e_40;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- _a = createSpan("ContainerClient-deleteBlob", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
+ options.conditions = options.conditions || {};
+ _b = createSpan("PageBlobClient-getPageRangesDiff", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
case 1:
- _b.trys.push([1, 3, 4, 5]);
- blobClient = this.getBlobClient(blobName);
- if (options.versionId) {
- blobClient = blobClient.withVersion(options.versionId);
- }
- return [4 /*yield*/, blobClient.delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 2: return [2 /*return*/, _b.sent()];
+ _c.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.pageBlobContext
+ .getPageRangesDiff({
+ abortSignal: options.abortSignal,
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ prevsnapshot: prevSnapshot,
+ range: rangeToString({ offset: offset, count: count }),
+ spanOptions: spanOptions
+ })
+ .then(rangeResponseFromModel)];
+ case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_60 = _b.sent();
+ e_40 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_60.message
+ message: e_40.message
});
- throw e_60;
+ throw e_40;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -51715,43 +52079,47 @@ var ContainerClient = /** @class */ (function (_super) {
});
};
/**
- * listBlobFlatSegment returns a single segment of blobs starting from the
- * specified Marker. Use an empty Marker to start enumeration from the beginning.
- * After getting a segment, process it, and then call listBlobsFlatSegment again
- * (passing the the previously-returned Marker) to get the next segment.
- * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
*
- * @param {string} [marker] A string value that identifies the portion of the list to be returned with the next list operation.
- * @param {ContainerListBlobsSegmentOptions} [options] Options to Container List Blob Flat Segment operation.
- * @returns {Promise}
- * @memberof ContainerClient
+ * @param {number} offset Starting byte position of the page blob
+ * @param {number} count Number of bytes to get ranges diff.
+ * @param {string} prevSnapshotUrl URL of snapshot to retrieve the difference.
+ * @param {PageBlobGetPageRangesDiffOptions} [options] Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns {Promise} Response data for the Page Blob Get Page Range Diff operation.
+ * @memberof PageBlobClient
*/
- ContainerClient.prototype.listBlobFlatSegment = function (marker, options) {
+ PageBlobClient.prototype.getPageRangesDiffForManagedDisks = function (offset, count, prevSnapshotUrl, options) {
+ var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, response, wrappedResponse, e_61;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
+ var _b, span, spanOptions, e_41;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- _a = createSpan("ContainerClient-listBlobFlatSegment", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
+ options.conditions = options.conditions || {};
+ _b = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.containerContext.listBlobFlatSegment(tslib.__assign(tslib.__assign({ marker: marker }, options), { spanOptions: spanOptions }))];
- case 2:
- response = _b.sent();
- wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, segment: tslib.__assign(tslib.__assign({}, response.segment), { blobItems: response.segment.blobItems.map(function (blobItemInteral) {
- var blobItem = tslib.__assign(tslib.__assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) });
- return blobItem;
- }) }) });
- return [2 /*return*/, wrappedResponse];
+ _c.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.pageBlobContext
+ .getPageRangesDiff({
+ abortSignal: options.abortSignal,
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ prevSnapshotUrl: prevSnapshotUrl,
+ range: rangeToString({ offset: offset, count: count }),
+ spanOptions: spanOptions
+ })
+ .then(rangeResponseFromModel)];
+ case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_61 = _b.sent();
+ e_41 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_61.message
+ message: e_41.message
});
- throw e_61;
+ throw e_41;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -51761,44 +52129,42 @@ var ContainerClient = /** @class */ (function (_super) {
});
};
/**
- * listBlobHierarchySegment returns a single segment of blobs starting from
- * the specified Marker. Use an empty Marker to start enumeration from the
- * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment
- * again (passing the the previously-returned Marker) to get the next segment.
- * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs
+ * Resizes the page blob to the specified size (which must be a multiple of 512).
+ * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties
*
- * @param {string} delimiter The character or string used to define the virtual hierarchy
- * @param {string} [marker] A string value that identifies the portion of the list to be returned with the next list operation.
- * @param {ContainerListBlobsSegmentOptions} [options] Options to Container List Blob Hierarchy Segment operation.
- * @returns {Promise}
- * @memberof ContainerClient
+ * @param {number} size Target size
+ * @param {PageBlobResizeOptions} [options] Options to the Page Blob Resize operation.
+ * @returns {Promise} Response data for the Page Blob Resize operation.
+ * @memberof PageBlobClient
*/
- ContainerClient.prototype.listBlobHierarchySegment = function (delimiter, marker, options) {
+ PageBlobClient.prototype.resize = function (size, options) {
+ var _a;
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, response, wrappedResponse, e_62;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
+ var _b, span, spanOptions, e_42;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- _a = createSpan("ContainerClient-listBlobHierarchySegment", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
+ options.conditions = options.conditions || {};
+ _b = createSpan("PageBlobClient-resize", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.containerContext.listBlobHierarchySegment(delimiter, tslib.__assign(tslib.__assign({ marker: marker }, options), { spanOptions: spanOptions }))];
- case 2:
- response = _b.sent();
- wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, segment: tslib.__assign(tslib.__assign({}, response.segment), { blobItems: response.segment.blobItems.map(function (blobItemInteral) {
- var blobItem = tslib.__assign(tslib.__assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) });
- return blobItem;
- }) }) });
- return [2 /*return*/, wrappedResponse];
+ _c.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.pageBlobContext.resize(size, {
+ abortSignal: options.abortSignal,
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ encryptionScope: options.encryptionScope,
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _c.sent()];
case 3:
- e_62 = _b.sent();
+ e_42 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_62.message
+ message: e_42.message
});
- throw e_62;
+ throw e_42;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -51808,883 +52174,814 @@ var ContainerClient = /** @class */ (function (_super) {
});
};
/**
- * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse
+ * Sets a page blob's sequence number.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties
*
- * @private
- * @param {string} [marker] A string value that identifies the portion of
- * the list of blobs to be returned with the next listing operation. The
- * operation returns the ContinuationToken value within the response body if the
- * listing operation did not return all blobs remaining to be listed
- * with the current page. The ContinuationToken value can be used as the value for
- * the marker parameter in a subsequent call to request the next page of list
- * items. The marker value is opaque to the client.
- * @param {ContainerListBlobsSegmentOptions} [options] Options to list blobs operation.
- * @returns {AsyncIterableIterator}
- * @memberof ContainerClient
+ * @param {SequenceNumberActionType} sequenceNumberAction Indicates how the service should modify the blob's sequence number.
+ * @param {number} [sequenceNumber] Required if sequenceNumberAction is max or update
+ * @param {PageBlobUpdateSequenceNumberOptions} [options] Options to the Page Blob Update Sequence Number operation.
+ * @returns {Promise} Response data for the Page Blob Update Sequence Number operation.
+ * @memberof PageBlobClient
*/
- ContainerClient.prototype.listSegments = function (marker, options) {
+ PageBlobClient.prototype.updateSequenceNumber = function (sequenceNumberAction, sequenceNumber, options) {
+ var _a;
if (options === void 0) { options = {}; }
- return tslib.__asyncGenerator(this, arguments, function listSegments_1() {
- var listBlobsFlatSegmentResponse;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _b, span, spanOptions, e_43;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- if (!(!!marker || marker === undefined)) return [3 /*break*/, 7];
- _a.label = 1;
- case 1: return [4 /*yield*/, tslib.__await(this.listBlobFlatSegment(marker, options))];
- case 2:
- listBlobsFlatSegmentResponse = _a.sent();
- marker = listBlobsFlatSegmentResponse.continuationToken;
- return [4 /*yield*/, tslib.__await(listBlobsFlatSegmentResponse)];
- case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])];
- case 4: return [4 /*yield*/, _a.sent()];
- case 5:
- _a.sent();
- _a.label = 6;
- case 6:
- if (marker) return [3 /*break*/, 1];
- _a.label = 7;
- case 7: return [2 /*return*/];
+ options.conditions = options.conditions || {};
+ _b = createSpan("PageBlobClient-updateSequenceNumber", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
+ case 1:
+ _c.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, {
+ abortSignal: options.abortSignal,
+ blobSequenceNumber: sequenceNumber,
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _c.sent()];
+ case 3:
+ e_43 = _c.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_43.message
+ });
+ throw e_43;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
}
});
});
};
/**
- * Returns an AsyncIterableIterator of {@link BlobItem} objects
+ * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.
+ * The snapshot is copied such that only the differential changes between the previously
+ * copied snapshot are transferred to the destination.
+ * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.
+ * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob
+ * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots
*
- * @private
- * @param {ContainerListBlobsSegmentOptions} [options] Options to list blobs operation.
- * @returns {AsyncIterableIterator}
- * @memberof ContainerClient
+ * @param {string} copySource Specifies the name of the source page blob snapshot. For example,
+ * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=
+ * @param {PageBlobStartCopyIncrementalOptions} [options] Options to the Page Blob Copy Incremental operation.
+ * @returns {Promise} Response data for the Page Blob Copy Incremental operation.
+ * @memberof PageBlobClient
*/
- ContainerClient.prototype.listItems = function (options) {
+ PageBlobClient.prototype.startCopyIncremental = function (copySource, options) {
+ var _a;
if (options === void 0) { options = {}; }
- return tslib.__asyncGenerator(this, arguments, function listItems_1() {
- var marker, _a, _b, listBlobsFlatSegmentResponse, e_63_1;
- var e_63, _c;
- return tslib.__generator(this, function (_d) {
- switch (_d.label) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _b, span, spanOptions, e_44;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- _d.trys.push([0, 7, 8, 13]);
- _a = tslib.__asyncValues(this.listSegments(marker, options));
- _d.label = 1;
- case 1: return [4 /*yield*/, tslib.__await(_a.next())];
- case 2:
- if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6];
- listBlobsFlatSegmentResponse = _b.value;
- return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems)))];
- case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])];
+ _b = createSpan("PageBlobClient-startCopyIncremental", options.tracingOptions), span = _b.span, spanOptions = _b.spanOptions;
+ _c.label = 1;
+ case 1:
+ _c.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.pageBlobContext.copyIncremental(copySource, {
+ abortSignal: options.abortSignal,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }),
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _c.sent()];
+ case 3:
+ e_44 = _c.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_44.message
+ });
+ throw e_44;
case 4:
- _d.sent();
- _d.label = 5;
- case 5: return [3 /*break*/, 1];
- case 6: return [3 /*break*/, 13];
- case 7:
- e_63_1 = _d.sent();
- e_63 = { error: e_63_1 };
- return [3 /*break*/, 13];
- case 8:
- _d.trys.push([8, , 11, 12]);
- if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10];
- return [4 /*yield*/, tslib.__await(_c.call(_a))];
- case 9:
- _d.sent();
- _d.label = 10;
- case 10: return [3 /*break*/, 12];
- case 11:
- if (e_63) throw e_63.error;
+ span.end();
return [7 /*endfinally*/];
- case 12: return [7 /*endfinally*/];
- case 13: return [2 /*return*/];
+ case 5: return [2 /*return*/];
}
});
});
};
+ return PageBlobClient;
+}(BlobClient));
+/**
+ * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}.
+ *
+ * @export
+ * @class BlobLeaseClient
+ */
+var BlobLeaseClient = /** @class */ (function () {
/**
- * Returns an async iterable iterator to list all the blobs
- * under the specified account.
- *
- * .byPage() returns an async iterable iterator to list the blobs in pages.
- *
- * Example using `for await` syntax:
- *
- * ```js
- * // Get the containerClient before you run these snippets,
- * // Can be obtained from `blobServiceClient.getContainerClient("");`
- * let i = 1;
- * for await (const blob of containerClient.listBlobsFlat()) {
- * console.log(`Blob ${i++}: ${blob.name}`);
- * }
- * ```
- *
- * Example using `iter.next()`:
- *
- * ```js
- * let i = 1;
- * let iter = containerClient.listBlobsFlat();
- * let blobItem = await iter.next();
- * while (!blobItem.done) {
- * console.log(`Blob ${i++}: ${blobItem.value.name}`);
- * blobItem = await iter.next();
- * }
- * ```
- *
- * Example using `byPage()`:
- *
- * ```js
- * // passing optional maxPageSize in the page settings
- * let i = 1;
- * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) {
- * for (const blob of response.segment.blobItems) {
- * console.log(`Blob ${i++}: ${blob.name}`);
- * }
- * }
- * ```
- *
- * Example using paging with a marker:
- *
- * ```js
- * let i = 1;
- * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 });
- * let response = (await iterator.next()).value;
- *
- * // Prints 2 blob names
- * for (const blob of response.segment.blobItems) {
- * console.log(`Blob ${i++}: ${blob.name}`);
- * }
- *
- * // Gets next marker
- * let marker = response.continuationToken;
- *
- * // Passing next marker as continuationToken
- *
- * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 });
- * response = (await iterator.next()).value;
- *
- * // Prints 10 blob names
- * for (const blob of response.segment.blobItems) {
- * console.log(`Blob ${i++}: ${blob.name}`);
- * }
- * ```
- *
- * @param {ContainerListBlobsOptions} [options={}] Options to list blobs.
- * @returns {PagedAsyncIterableIterator} An asyncIterableIterator that supports paging.
- * @memberof ContainerClient
+ * Creates an instance of BlobLeaseClient.
+ * @param {(ContainerClient | BlobClient)} client The client to make the lease operation requests.
+ * @param {string} leaseId Initial proposed lease id.
+ * @memberof BlobLeaseClient
*/
- ContainerClient.prototype.listBlobsFlat = function (options) {
- var _a;
- var _this = this;
- if (options === void 0) { options = {}; }
- var include = [];
- if (options.includeCopy) {
- include.push("copy");
- }
- if (options.includeDeleted) {
- include.push("deleted");
- }
- if (options.includeMetadata) {
- include.push("metadata");
- }
- if (options.includeSnapshots) {
- include.push("snapshots");
- }
- if (options.includeVersions) {
- include.push("versions");
- }
- if (options.includeUncommitedBlobs) {
- include.push("uncommittedblobs");
+ function BlobLeaseClient(client, leaseId) {
+ var clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions());
+ this._url = client.url;
+ if (client instanceof ContainerClient) {
+ this._isContainer = true;
+ this._containerOrBlobOperation = new Container(clientContext);
}
- if (options.includeTags) {
- include.push("tags");
+ else {
+ this._isContainer = false;
+ this._containerOrBlobOperation = new Blob$1(clientContext);
}
- if (options.prefix === "") {
- options.prefix = undefined;
+ if (!leaseId) {
+ leaseId = coreHttp.generateUuid();
}
- var updatedOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? { include: include } : {}));
- // AsyncIterableIterator to iterate over blobs
- var iter = this.listItems(updatedOptions);
- return _a = {
- /**
- * @member {Promise} [next] The next method, part of the iteration protocol
- */
- next: function () {
- return iter.next();
+ this._leaseId = leaseId;
+ }
+ Object.defineProperty(BlobLeaseClient.prototype, "leaseId", {
+ /**
+ * Gets the lease Id.
+ *
+ * @readonly
+ * @memberof BlobLeaseClient
+ * @type {string}
+ */
+ get: function () {
+ return this._leaseId;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ Object.defineProperty(BlobLeaseClient.prototype, "url", {
+ /**
+ * Gets the url.
+ *
+ * @readonly
+ * @memberof BlobLeaseClient
+ * @type {string}
+ */
+ get: function () {
+ return this._url;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ /**
+ * Establishes and manages a lock on a container for delete operations, or on a blob
+ * for write and delete operations.
+ * The lock duration can be 15 to 60 seconds, or can be infinite.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
+ * and
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
+ *
+ * @param {number} duration Must be between 15 to 60 seconds, or infinite (-1)
+ * @param {LeaseOperationOptions} [options={}] option to configure lease management operations.
+ * @returns {Promise} Response data for acquire lease operation.
+ * @memberof BlobLeaseClient
+ */
+ BlobLeaseClient.prototype.acquireLease = function (duration, options) {
+ var _a, _b, _c, _d, _e, _f;
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _g, span, spanOptions, e_45;
+ return tslib.__generator(this, function (_h) {
+ switch (_h.label) {
+ case 0:
+ _g = createSpan("BlobLeaseClient-acquireLease", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;
+ if (this._isContainer &&
+ ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
+ (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
+ throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
+ }
+ _h.label = 1;
+ case 1:
+ _h.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this._containerOrBlobOperation.acquireLease({
+ abortSignal: options.abortSignal,
+ duration: duration,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }),
+ proposedLeaseId: this._leaseId,
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _h.sent()];
+ case 3:
+ e_45 = _h.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_45.message
+ });
+ throw e_45;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
}
- },
- /**
- * @member {Symbol} [asyncIterator] The connection to the async iterator, part of the iteration protocol
- */
- _a[Symbol.asyncIterator] = function () {
- return this;
- },
- /**
- * @member {Function} [byPage] Return an AsyncIterableIterator that works a page at a time
- */
- _a.byPage = function (settings) {
- if (settings === void 0) { settings = {}; }
- return _this.listSegments(settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, updatedOptions));
- },
- _a;
+ });
+ });
};
/**
- * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse
+ * To change the ID of the lease.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
+ * and
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
*
- * @private
- * @param {string} delimiter The character or string used to define the virtual hierarchy
- * @param {string} [marker] A string value that identifies the portion of
- * the list of blobs to be returned with the next listing operation. The
- * operation returns the ContinuationToken value within the response body if the
- * listing operation did not return all blobs remaining to be listed
- * with the current page. The ContinuationToken value can be used as the value for
- * the marker parameter in a subsequent call to request the next page of list
- * items. The marker value is opaque to the client.
- * @param {ContainerListBlobsSegmentOptions} [options] Options to list blobs operation.
- * @returns {AsyncIterableIterator}
- * @memberof ContainerClient
+ * @param {string} proposedLeaseId the proposed new lease Id.
+ * @param {LeaseOperationOptions} [options={}] option to configure lease management operations.
+ * @returns {Promise} Response data for change lease operation.
+ * @memberof BlobLeaseClient
*/
- ContainerClient.prototype.listHierarchySegments = function (delimiter, marker, options) {
+ BlobLeaseClient.prototype.changeLease = function (proposedLeaseId, options) {
+ var _a, _b, _c, _d, _e, _f;
if (options === void 0) { options = {}; }
- return tslib.__asyncGenerator(this, arguments, function listHierarchySegments_1() {
- var listBlobsHierarchySegmentResponse;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _g, span, spanOptions, response, e_46;
+ return tslib.__generator(this, function (_h) {
+ switch (_h.label) {
case 0:
- if (!(!!marker || marker === undefined)) return [3 /*break*/, 7];
- _a.label = 1;
- case 1: return [4 /*yield*/, tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options))];
+ _g = createSpan("BlobLeaseClient-changeLease", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;
+ if (this._isContainer &&
+ ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
+ (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
+ throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
+ }
+ _h.label = 1;
+ case 1:
+ _h.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, {
+ abortSignal: options.abortSignal,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }),
+ spanOptions: spanOptions
+ })];
case 2:
- listBlobsHierarchySegmentResponse = _a.sent();
- marker = listBlobsHierarchySegmentResponse.continuationToken;
- return [4 /*yield*/, tslib.__await(listBlobsHierarchySegmentResponse)];
- case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])];
- case 4: return [4 /*yield*/, _a.sent()];
- case 5:
- _a.sent();
- _a.label = 6;
- case 6:
- if (marker) return [3 /*break*/, 1];
- _a.label = 7;
- case 7: return [2 /*return*/];
+ response = _h.sent();
+ this._leaseId = proposedLeaseId;
+ return [2 /*return*/, response];
+ case 3:
+ e_46 = _h.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_46.message
+ });
+ throw e_46;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
}
});
});
};
/**
- * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects.
+ * To free the lease if it is no longer needed so that another client may
+ * immediately acquire a lease against the container or the blob.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
+ * and
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
*
- * @private
- * @param {string} delimiter The character or string used to define the virtual hierarchy
- * @param {ContainerListBlobsSegmentOptions} [options] Options to list blobs operation.
- * @returns {AsyncIterableIterator<{ kind: "prefix" } & BlobPrefix | { kind: "blob" } & BlobItem>}
- * @memberof ContainerClient
+ * @param {LeaseOperationOptions} [options={}] option to configure lease management operations.
+ * @returns {Promise} Response data for release lease operation.
+ * @memberof BlobLeaseClient
*/
- ContainerClient.prototype.listItemsByHierarchy = function (delimiter, options) {
+ BlobLeaseClient.prototype.releaseLease = function (options) {
+ var _a, _b, _c, _d, _e, _f;
if (options === void 0) { options = {}; }
- return tslib.__asyncGenerator(this, arguments, function listItemsByHierarchy_1() {
- var marker, _a, _b, listBlobsHierarchySegmentResponse, segment, _i, _c, prefix, _d, _e, blob, e_64_1;
- var e_64, _f;
- return tslib.__generator(this, function (_g) {
- switch (_g.label) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _g, span, spanOptions, e_47;
+ return tslib.__generator(this, function (_h) {
+ switch (_h.label) {
case 0:
- _g.trys.push([0, 14, 15, 20]);
- _a = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options));
- _g.label = 1;
- case 1: return [4 /*yield*/, tslib.__await(_a.next())];
- case 2:
- if (!(_b = _g.sent(), !_b.done)) return [3 /*break*/, 13];
- listBlobsHierarchySegmentResponse = _b.value;
- segment = listBlobsHierarchySegmentResponse.segment;
- if (!segment.blobPrefixes) return [3 /*break*/, 7];
- _i = 0, _c = segment.blobPrefixes;
- _g.label = 3;
+ _g = createSpan("BlobLeaseClient-releaseLease", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;
+ if (this._isContainer &&
+ ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
+ (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
+ throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
+ }
+ _h.label = 1;
+ case 1:
+ _h.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this._containerOrBlobOperation.releaseLease(this._leaseId, {
+ abortSignal: options.abortSignal,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }),
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _h.sent()];
case 3:
- if (!(_i < _c.length)) return [3 /*break*/, 7];
- prefix = _c[_i];
- return [4 /*yield*/, tslib.__await(tslib.__assign({ kind: "prefix" }, prefix))];
- case 4: return [4 /*yield*/, _g.sent()];
- case 5:
- _g.sent();
- _g.label = 6;
- case 6:
- _i++;
- return [3 /*break*/, 3];
- case 7:
- _d = 0, _e = segment.blobItems;
- _g.label = 8;
- case 8:
- if (!(_d < _e.length)) return [3 /*break*/, 12];
- blob = _e[_d];
- return [4 /*yield*/, tslib.__await(tslib.__assign({ kind: "blob" }, blob))];
- case 9: return [4 /*yield*/, _g.sent()];
- case 10:
- _g.sent();
- _g.label = 11;
- case 11:
- _d++;
- return [3 /*break*/, 8];
- case 12: return [3 /*break*/, 1];
- case 13: return [3 /*break*/, 20];
- case 14:
- e_64_1 = _g.sent();
- e_64 = { error: e_64_1 };
- return [3 /*break*/, 20];
- case 15:
- _g.trys.push([15, , 18, 19]);
- if (!(_b && !_b.done && (_f = _a.return))) return [3 /*break*/, 17];
- return [4 /*yield*/, tslib.__await(_f.call(_a))];
- case 16:
- _g.sent();
- _g.label = 17;
- case 17: return [3 /*break*/, 19];
- case 18:
- if (e_64) throw e_64.error;
+ e_47 = _h.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_47.message
+ });
+ throw e_47;
+ case 4:
+ span.end();
return [7 /*endfinally*/];
- case 19: return [7 /*endfinally*/];
- case 20: return [2 /*return*/];
+ case 5: return [2 /*return*/];
}
});
});
};
/**
- * Returns an async iterable iterator to list all the blobs by hierarchy.
- * under the specified account.
- *
- * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages.
- *
- * Example using `for await` syntax:
- *
- * ```js
- * for await (const item of containerClient.listBlobsByHierarchy("/")) {
- * if (item.kind === "prefix") {
- * console.log(`\tBlobPrefix: ${item.name}`);
- * } else {
- * console.log(`\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`);
- * }
- * }
- * ```
- *
- * Example using `iter.next()`:
- *
- * ```js
- * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" });
- * let entity = await iter.next();
- * while (!entity.done) {
- * let item = entity.value;
- * if (item.kind === "prefix") {
- * console.log(`\tBlobPrefix: ${item.name}`);
- * } else {
- * console.log(`\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`);
- * }
- * entity = await iter.next();
- * }
- * ```js
- *
- * Example using `byPage()`:
- *
- * ```js
- * console.log("Listing blobs by hierarchy by page");
- * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) {
- * const segment = response.segment;
- * if (segment.blobPrefixes) {
- * for (const prefix of segment.blobPrefixes) {
- * console.log(`\tBlobPrefix: ${prefix.name}`);
- * }
- * }
- * for (const blob of response.segment.blobItems) {
- * console.log(`\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`);
- * }
- * }
- * ```
- *
- * Example using paging with a max page size:
- *
- * ```js
- * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size");
- *
- * let i = 1;
- * for await (const response of containerClient.listBlobsByHierarchy("/", { prefix: "prefix2/sub1/"}).byPage({ maxPageSize: 2 })) {
- * console.log(`Page ${i++}`);
- * const segment = response.segment;
- *
- * if (segment.blobPrefixes) {
- * for (const prefix of segment.blobPrefixes) {
- * console.log(`\tBlobPrefix: ${prefix.name}`);
- * }
- * }
- *
- * for (const blob of response.segment.blobItems) {
- * console.log(`\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`);
- * }
- * }
- * ```
+ * To renew the lease.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
+ * and
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
*
- * @param {string} delimiter The character or string used to define the virtual hierarchy
- * @param {ContainerListBlobsOptions} [options={}] Options to list blobs operation.
- * @returns {(PagedAsyncIterableIterator<
- * { kind: "prefix" } & BlobPrefix | { kind: "blob" } & BlobItem,
- * ContainerListBlobHierarchySegmentResponse
- * >)}
- * @memberof ContainerClient
+ * @param {LeaseOperationOptions} [options={}] Optional option to configure lease management operations.
+ * @returns {Promise} Response data for renew lease operation.
+ * @memberof BlobLeaseClient
*/
- ContainerClient.prototype.listBlobsByHierarchy = function (delimiter, options) {
- var _a;
- var _this = this;
+ BlobLeaseClient.prototype.renewLease = function (options) {
+ var _a, _b, _c, _d, _e, _f;
if (options === void 0) { options = {}; }
- if (delimiter === "") {
- throw new RangeError("delimiter should contain one or more characters");
- }
- var include = [];
- if (options.includeCopy) {
- include.push("copy");
- }
- if (options.includeDeleted) {
- include.push("deleted");
- }
- if (options.includeMetadata) {
- include.push("metadata");
- }
- if (options.includeSnapshots) {
- include.push("snapshots");
- }
- if (options.includeVersions) {
- include.push("versions");
- }
- if (options.includeUncommitedBlobs) {
- include.push("uncommittedblobs");
- }
- if (options.includeTags) {
- include.push("tags");
- }
- if (options.prefix === "") {
- options.prefix = undefined;
- }
- var updatedOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? { include: include } : {}));
- // AsyncIterableIterator to iterate over blob prefixes and blobs
- var iter = this.listItemsByHierarchy(delimiter, updatedOptions);
- return _a = {
- /**
- * @member {Promise} [next] The next method, part of the iteration protocol
- */
- next: function () {
- return tslib.__awaiter(this, void 0, void 0, function () {
- return tslib.__generator(this, function (_a) {
- return [2 /*return*/, iter.next()];
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _g, span, spanOptions, e_48;
+ return tslib.__generator(this, function (_h) {
+ switch (_h.label) {
+ case 0:
+ _g = createSpan("BlobLeaseClient-renewLease", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;
+ if (this._isContainer &&
+ ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
+ (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
+ throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
+ }
+ _h.label = 1;
+ case 1:
+ _h.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this._containerOrBlobOperation.renewLease(this._leaseId, {
+ abortSignal: options.abortSignal,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }),
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _h.sent()];
+ case 3:
+ e_48 = _h.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_48.message
});
- });
+ throw e_48;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
}
- },
- /**
- * @member {Symbol} [asyncIterator] The connection to the async iterator, part of the iteration protocol
- */
- _a[Symbol.asyncIterator] = function () {
- return this;
- },
- /**
- * @member {Function} [byPage] Return an AsyncIterableIterator that works a page at a time
- */
- _a.byPage = function (settings) {
- if (settings === void 0) { settings = {}; }
- return _this.listHierarchySegments(delimiter, settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, updatedOptions));
- },
- _a;
- };
- ContainerClient.prototype.getContainerNameFromUrl = function () {
- var containerName;
- try {
- // URL may look like the following
- // "https://myaccount.blob.core.windows.net/mycontainer?sasString";
- // "https://myaccount.blob.core.windows.net/mycontainer";
- // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername`
- // http://localhost:10001/devstoreaccount1/containername
- var parsedUrl = coreHttp.URLBuilder.parse(this.url);
- if (parsedUrl.getHost().split(".")[1] === "blob") {
- // "https://myaccount.blob.core.windows.net/containername".
- // "https://customdomain.com/containername".
- // .getPath() -> /containername
- containerName = parsedUrl.getPath().split("/")[1];
- }
- else if (isIpEndpointStyle(parsedUrl)) {
- // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername
- // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername
- // .getPath() -> /devstoreaccount1/containername
- containerName = parsedUrl.getPath().split("/")[2];
- }
- else {
- // "https://customdomain.com/containername".
- // .getPath() -> /containername
- containerName = parsedUrl.getPath().split("/")[1];
- }
- // decode the encoded containerName - to get all the special characters that might be present in it
- containerName = decodeURIComponent(containerName);
- if (!containerName) {
- throw new Error("Provided containerName is invalid.");
- }
- return containerName;
- }
- catch (error) {
- throw new Error("Unable to extract containerName with provided information.");
- }
- };
- return ContainerClient;
-}(StorageClient));
-
-function getBodyAsText(batchResponse) {
- return tslib.__awaiter(this, void 0, void 0, function () {
- var buffer, responseLength;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES);
- return [4 /*yield*/, streamToBuffer2(batchResponse.readableStreamBody, buffer)];
- case 1:
- responseLength = _a.sent();
- // Slice the buffer to trim the empty ending.
- buffer = buffer.slice(0, responseLength);
- return [2 /*return*/, buffer.toString()];
- }
+ });
});
- });
-}
-function utf8ByteLength(str) {
- return Buffer.byteLength(str);
-}
-
-var HTTP_HEADER_DELIMITER = ": ";
-var SPACE_DELIMITER = " ";
-var NOT_FOUND = -1;
-/**
- * Util class for parsing batch response.
- */
-var BatchResponseParser = /** @class */ (function () {
- function BatchResponseParser(batchResponse, subRequests) {
- if (!batchResponse || !batchResponse.contentType) {
- // In special case(reported), server may return invalid content-type which could not be parsed.
- throw new RangeError("batchResponse is malformed or doesn't contain valid content-type.");
- }
- if (!subRequests || subRequests.size === 0) {
- // This should be prevent during coding.
- throw new RangeError("Invalid state: subRequests is not provided or size is 0.");
- }
- this.batchResponse = batchResponse;
- this.subRequests = subRequests;
- this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1];
- this.perResponsePrefix = "--" + this.responseBatchBoundary + HTTP_LINE_ENDING;
- this.batchResponseEnding = "--" + this.responseBatchBoundary + "--";
- }
- // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response
- BatchResponseParser.prototype.parseBatchResponse = function () {
+ };
+ /**
+ * To end the lease but ensure that another client cannot acquire a new lease
+ * until the current lease period has expired.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container
+ * and
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob
+ *
+ * @static
+ * @param {number} breakPeriod Break period
+ * @param {LeaseOperationOptions} [options={}] Optional options to configure lease management operations.
+ * @returns {Promise} Response data for break lease operation.
+ * @memberof BlobLeaseClient
+ */
+ BlobLeaseClient.prototype.breakLease = function (breakPeriod, options) {
+ var _a, _b, _c, _d, _e, _f;
+ if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var responseBodyAsText, subResponses, subResponseCount, deserializedSubResponses, subResponsesSucceededCount, subResponsesFailedCount, index, subResponse, deserializedSubResponse, responseLines, subRespHeaderStartFound, subRespHeaderEndFound, subRespFailed, contentId, _i, responseLines_1, responseLine, tokens, tokens;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
+ var _g, span, spanOptions, operationOptions, e_49;
+ return tslib.__generator(this, function (_h) {
+ switch (_h.label) {
case 0:
- // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse
- // sub request's response.
- if (this.batchResponse._response.status != HTTPURLConnection.HTTP_ACCEPTED) {
- throw new Error("Invalid state: batch request failed with status: '" + this.batchResponse._response.status + "'.");
+ _g = createSpan("BlobLeaseClient-breakLease", options.tracingOptions), span = _g.span, spanOptions = _g.spanOptions;
+ if (this._isContainer &&
+ ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) ||
+ (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) {
+ throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.");
}
- return [4 /*yield*/, getBodyAsText(this.batchResponse)];
+ _h.label = 1;
case 1:
- responseBodyAsText = _a.sent();
- subResponses = responseBodyAsText
- .split(this.batchResponseEnding)[0] // string after ending is useless
- .split(this.perResponsePrefix)
- .slice(1);
- subResponseCount = subResponses.length;
- // Defensive coding in case of potential error parsing.
- // Note: subResponseCount == 1 is special case where sub request is invalid.
- // We try to prevent such cases through early validation, e.g. validate sub request count >= 1.
- // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user.
- if (subResponseCount != this.subRequests.size && subResponseCount != 1) {
- throw new Error("Invalid state: sub responses' count is not equal to sub requests' count.");
- }
- deserializedSubResponses = new Array(subResponseCount);
- subResponsesSucceededCount = 0;
- subResponsesFailedCount = 0;
- // Parse sub subResponses.
- for (index = 0; index < subResponseCount; index++) {
- subResponse = subResponses[index];
- deserializedSubResponses[index] = {};
- deserializedSubResponse = deserializedSubResponses[index];
- deserializedSubResponse.headers = new coreHttp.HttpHeaders();
- responseLines = subResponse.split("" + HTTP_LINE_ENDING);
- subRespHeaderStartFound = false;
- subRespHeaderEndFound = false;
- subRespFailed = false;
- contentId = NOT_FOUND;
- for (_i = 0, responseLines_1 = responseLines; _i < responseLines_1.length; _i++) {
- responseLine = responseLines_1[_i];
- if (!subRespHeaderStartFound) {
- // Convention line to indicate content ID
- if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) {
- contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]);
- }
- // Http version line with status code indicates the start of sub request's response.
- // Example: HTTP/1.1 202 Accepted
- if (responseLine.startsWith(HTTP_VERSION_1_1)) {
- subRespHeaderStartFound = true;
- tokens = responseLine.split(SPACE_DELIMITER);
- deserializedSubResponse.status = parseInt(tokens[1]);
- deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER);
- }
- continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: *
- }
- if (responseLine.trim() === "") {
- // Sub response's header start line already found, and the first empty line indicates header end line found.
- if (!subRespHeaderEndFound) {
- subRespHeaderEndFound = true;
- }
- continue; // Skip empty line
- }
- // Note: when code reach here, it indicates subRespHeaderStartFound == true
- if (!subRespHeaderEndFound) {
- if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) {
- // Defensive coding to prevent from missing valuable lines.
- throw new Error("Invalid state: find non-empty line '" + responseLine + "' without HTTP header delimiter '" + HTTP_HEADER_DELIMITER + "'.");
- }
- tokens = responseLine.split(HTTP_HEADER_DELIMITER);
- deserializedSubResponse.headers.set(tokens[0], tokens[1]);
- if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) {
- deserializedSubResponse.errorCode = tokens[1];
- subRespFailed = true;
- }
- }
- else {
- // Assemble body of sub response.
- if (!deserializedSubResponse.bodyAsText) {
- deserializedSubResponse.bodyAsText = "";
- }
- deserializedSubResponse.bodyAsText += responseLine;
- }
- } // Inner for end
- if (contentId != NOT_FOUND) {
- deserializedSubResponse._request = this.subRequests.get(contentId);
- }
- if (subRespFailed) {
- subResponsesFailedCount++;
- }
- else {
- subResponsesSucceededCount++;
- }
- }
- return [2 /*return*/, {
- subResponses: deserializedSubResponses,
- subResponsesSucceededCount: subResponsesSucceededCount,
- subResponsesFailedCount: subResponsesFailedCount
- }];
+ _h.trys.push([1, 3, 4, 5]);
+ operationOptions = {
+ abortSignal: options.abortSignal,
+ breakPeriod: breakPeriod,
+ modifiedAccessConditions: tslib.__assign(tslib.__assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }),
+ spanOptions: spanOptions
+ };
+ return [4 /*yield*/, this._containerOrBlobOperation.breakLease(operationOptions)];
+ case 2: return [2 /*return*/, _h.sent()];
+ case 3:
+ e_49 = _h.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_49.message
+ });
+ throw e_49;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
}
});
});
};
- return BatchResponseParser;
+ return BlobLeaseClient;
}());
-
-var MutexLockStatus;
-(function (MutexLockStatus) {
- MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED";
- MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED";
-})(MutexLockStatus || (MutexLockStatus = {}));
/**
- * An async mutex lock.
+ * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs.
*
* @export
- * @class Mutex
+ * @class ContainerClient
*/
-var Mutex = /** @class */ (function () {
- function Mutex() {
+var ContainerClient = /** @class */ (function (_super) {
+ tslib.__extends(ContainerClient, _super);
+ function ContainerClient(urlOrConnectionString, credentialOrPipelineOrContainerName, options) {
+ var _this = this;
+ var pipeline;
+ var url;
+ options = options || {};
+ if (credentialOrPipelineOrContainerName instanceof Pipeline) {
+ // (url: string, pipeline: Pipeline)
+ url = urlOrConnectionString;
+ pipeline = credentialOrPipelineOrContainerName;
+ }
+ else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||
+ credentialOrPipelineOrContainerName instanceof AnonymousCredential ||
+ coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {
+ // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+ url = urlOrConnectionString;
+ pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
+ }
+ else if (!credentialOrPipelineOrContainerName &&
+ typeof credentialOrPipelineOrContainerName !== "string") {
+ // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)
+ // The second parameter is undefined. Use anonymous credential.
+ url = urlOrConnectionString;
+ pipeline = newPipeline(new AnonymousCredential(), options);
+ }
+ else if (credentialOrPipelineOrContainerName &&
+ typeof credentialOrPipelineOrContainerName === "string") {
+ // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)
+ var containerName = credentialOrPipelineOrContainerName;
+ var extractedCreds = extractConnectionStringParts(urlOrConnectionString);
+ if (extractedCreds.kind === "AccountConnString") {
+ {
+ var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
+ url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName));
+ options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);
+ pipeline = newPipeline(sharedKeyCredential, options);
+ }
+ }
+ else if (extractedCreds.kind === "SASConnString") {
+ url =
+ appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) +
+ "?" +
+ extractedCreds.accountSas;
+ pipeline = newPipeline(new AnonymousCredential(), options);
+ }
+ else {
+ throw new Error("Connection string must be either an Account connection string or a SAS connection string");
+ }
+ }
+ else {
+ throw new Error("Expecting non-empty strings for containerName parameter");
+ }
+ _this = _super.call(this, url, pipeline) || this;
+ _this._containerName = _this.getContainerNameFromUrl();
+ _this.containerContext = new Container(_this.storageClientContext);
+ return _this;
}
+ Object.defineProperty(ContainerClient.prototype, "containerName", {
+ /**
+ * The name of the container.
+ */
+ get: function () {
+ return this._containerName;
+ },
+ enumerable: false,
+ configurable: true
+ });
/**
- * Lock for a specific key. If the lock has been acquired by another customer, then
- * will wait until getting the lock.
+ * Creates a new container under the specified account. If the container with
+ * the same name already exists, the operation fails.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container
*
- * @static
- * @param {string} key lock key
- * @returns {Promise}
- * @memberof Mutex
+ * @param {ContainerCreateOptions} [options] Options to Container Create operation.
+ * @returns {Promise}
+ * @memberof ContainerClient
+ *
+ * Example usage:
+ *
+ * ```js
+ * const containerClient = blobServiceClient.getContainerClient("");
+ * const createContainerResponse = await containerClient.create();
+ * console.log("Container was created successfully", createContainerResponse.requestId);
+ * ```
*/
- Mutex.lock = function (key) {
+ ContainerClient.prototype.create = function (options) {
+ if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _this = this;
- return tslib.__generator(this, function (_a) {
- return [2 /*return*/, new Promise(function (resolve) {
- if (_this.keys[key] === undefined || _this.keys[key] === MutexLockStatus.UNLOCKED) {
- _this.keys[key] = MutexLockStatus.LOCKED;
- resolve();
- }
- else {
- _this.onUnlockEvent(key, function () {
- _this.keys[key] = MutexLockStatus.LOCKED;
- resolve();
- });
- }
- })];
+ var _a, span, spanOptions, e_50;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ _a = createSpan("ContainerClient-create", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.containerContext.create(tslib.__assign(tslib.__assign({}, options), { spanOptions: spanOptions }))];
+ case 2:
+ // Spread operator in destructuring assignments,
+ // this will filter out unwanted properties from the response object into result object
+ return [2 /*return*/, _b.sent()];
+ case 3:
+ e_50 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_50.message
+ });
+ throw e_50;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
});
});
};
/**
- * Unlock a key.
+ * Creates a new container under the specified account. If the container with
+ * the same name already exists, it is not changed.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container
*
- * @static
- * @param {string} key
- * @returns {Promise}
- * @memberof Mutex
+ * @param {ContainerCreateOptions} [options]
+ * @returns {Promise}
+ * @memberof ContainerClient
*/
- Mutex.unlock = function (key) {
+ ContainerClient.prototype.createIfNotExists = function (options) {
+ var _a, _b;
+ if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _this = this;
- return tslib.__generator(this, function (_a) {
- return [2 /*return*/, new Promise(function (resolve) {
- if (_this.keys[key] === MutexLockStatus.LOCKED) {
- _this.emitUnlockEvent(key);
+ var _c, span, spanOptions, res, e_51;
+ return tslib.__generator(this, function (_d) {
+ switch (_d.label) {
+ case 0:
+ _c = createSpan("ContainerClient-createIfNotExists", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;
+ _d.label = 1;
+ case 1:
+ _d.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.create(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 2:
+ res = _d.sent();
+ return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable
+ })];
+ case 3:
+ e_51 = _d.sent();
+ if (((_a = e_51.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") {
+ span.setStatus({
+ code: api.CanonicalCode.ALREADY_EXISTS,
+ message: "Expected exception when creating a container only if it does not already exist."
+ });
+ return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_51.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_51.response })];
}
- delete _this.keys[key];
- resolve();
- })];
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_51.message
+ });
+ throw e_51;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
});
});
};
- Mutex.onUnlockEvent = function (key, handler) {
- if (this.listeners[key] === undefined) {
- this.listeners[key] = [handler];
- }
- else {
- this.listeners[key].push(handler);
- }
- };
- Mutex.emitUnlockEvent = function (key) {
- var _this = this;
- if (this.listeners[key] !== undefined && this.listeners[key].length > 0) {
- var handler_1 = this.listeners[key].shift();
- setImmediate(function () {
- handler_1.call(_this);
+ /**
+ * Returns true if the Azure container resource represented by this client exists; false otherwise.
+ *
+ * NOTE: use this function with care since an existing container might be deleted by other clients or
+ * applications. Vice versa new containers with the same name might be added by other clients or
+ * applications after this function completes.
+ *
+ * @param {ContainerExistsOptions} [options={}]
+ * @returns {Promise}
+ * @memberof ContainerClient
+ */
+ ContainerClient.prototype.exists = function (options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _a, span, spanOptions, e_52;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ _a = createSpan("ContainerClient-exists", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.getProperties({
+ abortSignal: options.abortSignal,
+ tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions })
+ })];
+ case 2:
+ _b.sent();
+ return [2 /*return*/, true];
+ case 3:
+ e_52 = _b.sent();
+ if (e_52.statusCode === 404) {
+ span.setStatus({
+ code: api.CanonicalCode.NOT_FOUND,
+ message: "Expected exception when checking container existence"
+ });
+ return [2 /*return*/, false];
+ }
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_52.message
+ });
+ throw e_52;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
});
- }
+ });
};
- Mutex.keys = {};
- Mutex.listeners = {};
- return Mutex;
-}());
-
-/**
- * A BlobBatch represents an aggregated set of operations on blobs.
- * Currently, only `delete` and `setAccessTier` are supported.
- *
- * @export
- * @class BlobBatch
- */
-var BlobBatch = /** @class */ (function () {
- function BlobBatch() {
- this.batch = "batch";
- this.batchRequest = new InnerBatchRequest();
- }
/**
- * Get the value of Content-Type for a batch request.
- * The value must be multipart/mixed with a batch boundary.
- * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252
+ * Creates a {@link BlobClient}
+ *
+ * @param {string} blobName A blob name
+ * @returns {BlobClient} A new BlobClient object for the given blob name.
+ * @memberof ContainerClient
*/
- BlobBatch.prototype.getMultiPartContentType = function () {
- return this.batchRequest.getMultipartContentType();
+ ContainerClient.prototype.getBlobClient = function (blobName) {
+ return new BlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);
};
/**
- * Get assembled HTTP request body for sub requests.
+ * Creates an {@link AppendBlobClient}
+ *
+ * @param {string} blobName An append blob name
+ * @returns {AppendBlobClient}
+ * @memberof ContainerClient
*/
- BlobBatch.prototype.getHttpRequestBody = function () {
- return this.batchRequest.getHttpRequestBody();
+ ContainerClient.prototype.getAppendBlobClient = function (blobName) {
+ return new AppendBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);
};
/**
- * Get sub requests that are added into the batch request.
+ * Creates a {@link BlockBlobClient}
+ *
+ * @param {string} blobName A block blob name
+ * @returns {BlockBlobClient}
+ * @memberof ContainerClient
+ *
+ * Example usage:
+ *
+ * ```js
+ * const content = "Hello world!";
+ *
+ * const blockBlobClient = containerClient.getBlockBlobClient("");
+ * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);
+ * ```
*/
- BlobBatch.prototype.getSubRequests = function () {
- return this.batchRequest.getSubRequests();
+ ContainerClient.prototype.getBlockBlobClient = function (blobName) {
+ return new BlockBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);
};
- BlobBatch.prototype.addSubRequestInternal = function (subRequest, assembleSubRequestFunc) {
+ /**
+ * Creates a {@link PageBlobClient}
+ *
+ * @param {string} blobName A page blob name
+ * @returns {PageBlobClient}
+ * @memberof ContainerClient
+ */
+ ContainerClient.prototype.getPageBlobClient = function (blobName) {
+ return new PageBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);
+ };
+ /**
+ * Returns all user-defined metadata and system properties for the specified
+ * container. The data returned does not include the container's list of blobs.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties
+ *
+ * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if
+ * they originally contained uppercase characters. This differs from the metadata keys returned by
+ * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which
+ * will retain their original casing.
+ *
+ * @param {ContainerGetPropertiesOptions} [options] Options to Container Get Properties operation.
+ * @returns {Promise}
+ * @memberof ContainerClient
+ */
+ ContainerClient.prototype.getProperties = function (options) {
+ if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, Mutex.lock(this.batch)];
+ var _a, span, spanOptions, e_53;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ if (!options.conditions) {
+ options.conditions = {};
+ }
+ _a = createSpan("ContainerClient-getProperties", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
case 1:
- _a.sent();
- _a.label = 2;
- case 2:
- _a.trys.push([2, , 4, 6]);
- this.batchRequest.preAddSubRequest(subRequest);
- return [4 /*yield*/, assembleSubRequestFunc()];
+ _b.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.containerContext.getProperties(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal }, options.conditions), { spanOptions: spanOptions }))];
+ case 2: return [2 /*return*/, _b.sent()];
case 3:
- _a.sent();
- this.batchRequest.postAddSubRequest(subRequest);
- return [3 /*break*/, 6];
- case 4: return [4 /*yield*/, Mutex.unlock(this.batch)];
- case 5:
- _a.sent();
+ e_53 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_53.message
+ });
+ throw e_53;
+ case 4:
+ span.end();
return [7 /*endfinally*/];
- case 6: return [2 /*return*/];
+ case 5: return [2 /*return*/];
}
});
});
};
- BlobBatch.prototype.setBatchType = function (batchType) {
- if (!this.batchType) {
- this.batchType = batchType;
- }
- if (this.batchType !== batchType) {
- throw new RangeError("BlobBatch only supports one operation type per batch and it already is being used for " + this.batchType + " operations.");
- }
- };
- BlobBatch.prototype.deleteBlob = function (urlOrBlobClient, credentialOrOptions, options) {
+ /**
+ * Marks the specified container for deletion. The container and any blobs
+ * contained within it are later deleted during garbage collection.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container
+ *
+ * @param {ContainerDeleteMethodOptions} [options] Options to Container Delete operation.
+ * @returns {Promise}
+ * @memberof ContainerClient
+ */
+ ContainerClient.prototype.delete = function (options) {
+ if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var url, credential, _a, span, spanOptions, e_1;
- var _this = this;
+ var _a, span, spanOptions, e_54;
return tslib.__generator(this, function (_b) {
switch (_b.label) {
case 0:
- if (typeof urlOrBlobClient === "string" &&
- ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) ||
- credentialOrOptions instanceof AnonymousCredential ||
- coreHttp.isTokenCredential(credentialOrOptions))) {
- // First overload
- url = urlOrBlobClient;
- credential = credentialOrOptions;
- }
- else if (urlOrBlobClient instanceof BlobClient) {
- // Second overload
- url = urlOrBlobClient.url;
- credential = urlOrBlobClient.credential;
- options = credentialOrOptions;
- }
- else {
- throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided.");
- }
- if (!options) {
- options = {};
+ if (!options.conditions) {
+ options.conditions = {};
}
- _a = createSpan("BatchDeleteRequest-addSubRequest", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _a = createSpan("ContainerClient-delete", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
_b.label = 1;
case 1:
_b.trys.push([1, 3, 4, 5]);
- this.setBatchType("delete");
- return [4 /*yield*/, this.addSubRequestInternal({
- url: url,
- credential: credential
- }, function () { return tslib.__awaiter(_this, void 0, void 0, function () {
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 1:
- _a.sent();
- return [2 /*return*/];
- }
- });
- }); })];
+ return [4 /*yield*/, this.containerContext.deleteMethod({
+ abortSignal: options.abortSignal,
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: options.conditions,
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _b.sent()];
+ case 3:
+ e_54 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_54.message
+ });
+ throw e_54;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * Marks the specified container for deletion if it exists. The container and any blobs
+ * contained within it are later deleted during garbage collection.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container
+ *
+ * @param {ContainerDeleteMethodOptions} [options] Options to Container Delete operation.
+ * @returns {Promise}
+ * @memberof ContainerClient
+ */
+ ContainerClient.prototype.deleteIfExists = function (options) {
+ var _a, _b;
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _c, span, spanOptions, res, e_55;
+ return tslib.__generator(this, function (_d) {
+ switch (_d.label) {
+ case 0:
+ _c = createSpan("ContainerClient-deleteIfExists", options.tracingOptions), span = _c.span, spanOptions = _c.spanOptions;
+ _d.label = 1;
+ case 1:
+ _d.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
case 2:
- _b.sent();
- return [3 /*break*/, 5];
+ res = _d.sent();
+ return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable
+ })];
case 3:
- e_1 = _b.sent();
+ e_55 = _d.sent();
+ if (((_a = e_55.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") {
+ span.setStatus({
+ code: api.CanonicalCode.NOT_FOUND,
+ message: "Expected exception when deleting a container only if it exists."
+ });
+ return [2 /*return*/, tslib.__assign(tslib.__assign({ succeeded: false }, (_b = e_55.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e_55.response })];
+ }
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_1.message
+ message: e_55.message
});
- throw e_1;
+ throw e_55;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -52693,63 +52990,52 @@ var BlobBatch = /** @class */ (function () {
});
});
};
- BlobBatch.prototype.setBlobAccessTier = function (urlOrBlobClient, credentialOrTier, tierOrOptions, options) {
+ /**
+ * Sets one or more user-defined name-value pairs for the specified container.
+ *
+ * If no option provided, or no metadata defined in the parameter, the container
+ * metadata will be removed.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata
+ *
+ * @param {Metadata} [metadata] Replace existing metadata with this value.
+ * If no value provided the existing metadata will be removed.
+ * @param {ContainerSetMetadataOptions} [options] Options to Container Set Metadata operation.
+ * @returns {Promise}
+ * @memberof ContainerClient
+ */
+ ContainerClient.prototype.setMetadata = function (metadata, options) {
+ if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var url, credential, tier, _a, span, spanOptions, e_2;
- var _this = this;
+ var _a, span, spanOptions, e_56;
return tslib.__generator(this, function (_b) {
switch (_b.label) {
case 0:
- if (typeof urlOrBlobClient === "string" &&
- ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) ||
- credentialOrTier instanceof AnonymousCredential ||
- coreHttp.isTokenCredential(credentialOrTier))) {
- // First overload
- url = urlOrBlobClient;
- credential = credentialOrTier;
- tier = tierOrOptions;
- }
- else if (urlOrBlobClient instanceof BlobClient) {
- // Second overload
- url = urlOrBlobClient.url;
- credential = urlOrBlobClient.credential;
- tier = credentialOrTier;
- options = tierOrOptions;
- }
- else {
- throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided.");
+ if (!options.conditions) {
+ options.conditions = {};
}
- if (!options) {
- options = {};
+ if (options.conditions.ifUnmodifiedSince) {
+ throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service");
}
- _a = createSpan("BatchSetTierRequest-addSubRequest", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _a = createSpan("ContainerClient-setMetadata", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
_b.label = 1;
case 1:
_b.trys.push([1, 3, 4, 5]);
- this.setBatchType("setAccessTier");
- return [4 /*yield*/, this.addSubRequestInternal({
- url: url,
- credential: credential
- }, function () { return tslib.__awaiter(_this, void 0, void 0, function () {
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
- case 1:
- _a.sent();
- return [2 /*return*/];
- }
- });
- }); })];
- case 2:
- _b.sent();
- return [3 /*break*/, 5];
+ return [4 /*yield*/, this.containerContext.setMetadata({
+ abortSignal: options.abortSignal,
+ leaseAccessConditions: options.conditions,
+ metadata: metadata,
+ modifiedAccessConditions: options.conditions,
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _b.sent()];
case 3:
- e_2 = _b.sent();
+ e_56 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_2.message
+ message: e_56.message
});
- throw e_2;
+ throw e_56;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -52758,447 +53044,217 @@ var BlobBatch = /** @class */ (function () {
});
});
};
- return BlobBatch;
-}());
-/**
- * Inner batch request class which is responsible for assembling and serializing sub requests.
- * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled.
- */
-var InnerBatchRequest = /** @class */ (function () {
- function InnerBatchRequest() {
- this.operationCount = 0;
- this.body = "";
- var tempGuid = coreHttp.generateUuid();
- // batch_{batchid}
- this.boundary = "batch_" + tempGuid;
- // --batch_{batchid}
- // Content-Type: application/http
- // Content-Transfer-Encoding: binary
- this.subRequestPrefix = "--" + this.boundary + HTTP_LINE_ENDING + HeaderConstants.CONTENT_TYPE + ": application/http" + HTTP_LINE_ENDING + HeaderConstants.CONTENT_TRANSFER_ENCODING + ": binary";
- // multipart/mixed; boundary=batch_{batchid}
- this.multipartContentType = "multipart/mixed; boundary=" + this.boundary;
- // --batch_{batchid}--
- this.batchRequestEnding = "--" + this.boundary + "--";
- this.subRequests = new Map();
- }
/**
- * Create pipeline to assemble sub requests. The idea here is to use existing
- * credential and serialization/deserialization components, with additional policies to
- * filter unnecessary headers, assemble sub requests into request's body
- * and intercept request from going to wire.
- * @param {StorageSharedKeyCredential | AnonymousCredential | TokenCredential} credential Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the @azure/identity package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.
+ * Gets the permissions for the specified container. The permissions indicate
+ * whether container data may be accessed publicly.
+ *
+ * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings.
+ * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z".
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl
+ *
+ * @param {ContainerGetAccessPolicyOptions} [options] Options to Container Get Access Policy operation.
+ * @returns {Promise}
+ * @memberof ContainerClient
*/
- InnerBatchRequest.prototype.createPipeline = function (credential) {
- var isAnonymousCreds = credential instanceof AnonymousCredential;
- var policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory]
- var factories = new Array(policyFactoryLength);
- factories[0] = coreHttp.deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer
- factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers
- if (!isAnonymousCreds) {
- factories[2] = coreHttp.isTokenCredential(credential)
- ? coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes)
- : credential;
- }
- factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire
- return new Pipeline(factories, {});
- };
- InnerBatchRequest.prototype.appendSubRequestToBody = function (request) {
- // Start to assemble sub request
- this.body += [
- this.subRequestPrefix,
- HeaderConstants.CONTENT_ID + ": " + this.operationCount,
- "",
- request.method.toString() + " " + getURLPathAndQuery(request.url) + " " + HTTP_VERSION_1_1 + HTTP_LINE_ENDING // sub request start line with method
- ].join(HTTP_LINE_ENDING);
- for (var _i = 0, _a = request.headers.headersArray(); _i < _a.length; _i++) {
- var header = _a[_i];
- this.body += header.name + ": " + header.value + HTTP_LINE_ENDING;
- }
- this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line
- // No body to assemble for current batch request support
- // End to assemble sub request
- };
- InnerBatchRequest.prototype.preAddSubRequest = function (subRequest) {
- if (this.operationCount >= BATCH_MAX_REQUEST) {
- throw new RangeError("Cannot exceed " + BATCH_MAX_REQUEST + " sub requests in a single batch");
- }
- // Fast fail if url for sub request is invalid
- var path = getURLPath(subRequest.url);
- if (!path || path == "") {
- throw new RangeError("Invalid url for sub request: '" + subRequest.url + "'");
- }
- };
- InnerBatchRequest.prototype.postAddSubRequest = function (subRequest) {
- this.subRequests.set(this.operationCount, subRequest);
- this.operationCount++;
- };
- // Return the http request body with assembling the ending line to the sub request body.
- InnerBatchRequest.prototype.getHttpRequestBody = function () {
- return "" + this.body + this.batchRequestEnding + HTTP_LINE_ENDING;
- };
- InnerBatchRequest.prototype.getMultipartContentType = function () {
- return this.multipartContentType;
- };
- InnerBatchRequest.prototype.getSubRequests = function () {
- return this.subRequests;
- };
- return InnerBatchRequest;
-}());
-var BatchRequestAssemblePolicy = /** @class */ (function (_super) {
- tslib.__extends(BatchRequestAssemblePolicy, _super);
- function BatchRequestAssemblePolicy(batchRequest, nextPolicy, options) {
- var _this = _super.call(this, nextPolicy, options) || this;
- _this.dummyResponse = {
- request: new coreHttp.WebResource(),
- status: 200,
- headers: new coreHttp.HttpHeaders()
- };
- _this.batchRequest = batchRequest;
- return _this;
- }
- BatchRequestAssemblePolicy.prototype.sendRequest = function (request) {
+ ContainerClient.prototype.getAccessPolicy = function (options) {
+ if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0: return [4 /*yield*/, this.batchRequest.appendSubRequestToBody(request)];
+ var _a, span, spanOptions, response, res, _i, response_1, identifier, accessPolicy, e_57;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ if (!options.conditions) {
+ options.conditions = {};
+ }
+ _a = createSpan("ContainerClient-getAccessPolicy", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
case 1:
- _a.sent();
- return [2 /*return*/, this.dummyResponse]; // Intercept request from going to wire
- }
- });
- });
- };
- return BatchRequestAssemblePolicy;
-}(coreHttp.BaseRequestPolicy));
-var BatchRequestAssemblePolicyFactory = /** @class */ (function () {
- function BatchRequestAssemblePolicyFactory(batchRequest) {
- this.batchRequest = batchRequest;
- }
- BatchRequestAssemblePolicyFactory.prototype.create = function (nextPolicy, options) {
- return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options);
- };
- return BatchRequestAssemblePolicyFactory;
-}());
-var BatchHeaderFilterPolicy = /** @class */ (function (_super) {
- tslib.__extends(BatchHeaderFilterPolicy, _super);
- function BatchHeaderFilterPolicy(nextPolicy, options) {
- return _super.call(this, nextPolicy, options) || this;
- }
- BatchHeaderFilterPolicy.prototype.sendRequest = function (request) {
- return tslib.__awaiter(this, void 0, void 0, function () {
- var xMsHeaderName, _i, _a, header;
- return tslib.__generator(this, function (_b) {
- xMsHeaderName = "";
- for (_i = 0, _a = request.headers.headersArray(); _i < _a.length; _i++) {
- header = _a[_i];
- if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) {
- xMsHeaderName = header.name;
- }
- }
- if (xMsHeaderName !== "") {
- request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header.
- }
- return [2 /*return*/, this._nextPolicy.sendRequest(request)];
- });
- });
- };
- return BatchHeaderFilterPolicy;
-}(coreHttp.BaseRequestPolicy));
-var BatchHeaderFilterPolicyFactory = /** @class */ (function () {
- function BatchHeaderFilterPolicyFactory() {
- }
- BatchHeaderFilterPolicyFactory.prototype.create = function (nextPolicy, options) {
- return new BatchHeaderFilterPolicy(nextPolicy, options);
- };
- return BatchHeaderFilterPolicyFactory;
-}());
-
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service.
- *
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch
- */
-var BlobBatchClient = /** @class */ (function () {
- function BlobBatchClient(url, credentialOrPipeline, options) {
- var pipeline;
- if (credentialOrPipeline instanceof Pipeline) {
- pipeline = credentialOrPipeline;
- }
- else if (!credentialOrPipeline) {
- // no credential provided
- pipeline = newPipeline(new AnonymousCredential(), options);
- }
- else {
- pipeline = newPipeline(credentialOrPipeline, options);
- }
- var storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions());
- this._serviceContext = new Service(storageClientContext);
- }
- /**
- * Creates a {@link BlobBatch}.
- * A BlobBatch represents an aggregated set of operations on blobs.
- */
- BlobBatchClient.prototype.createBatch = function () {
- return new BlobBatch();
- };
- BlobBatchClient.prototype.deleteBlobs = function (urlsOrBlobClients, credentialOrOptions, options) {
- return tslib.__awaiter(this, void 0, void 0, function () {
- var batch, _i, urlsOrBlobClients_1, urlOrBlobClient;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- batch = new BlobBatch();
- _i = 0, urlsOrBlobClients_1 = urlsOrBlobClients;
- _a.label = 1;
- case 1:
- if (!(_i < urlsOrBlobClients_1.length)) return [3 /*break*/, 6];
- urlOrBlobClient = urlsOrBlobClients_1[_i];
- if (!(typeof urlOrBlobClient === "string")) return [3 /*break*/, 3];
- return [4 /*yield*/, batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options)];
- case 2:
- _a.sent();
- return [3 /*break*/, 5];
- case 3: return [4 /*yield*/, batch.deleteBlob(urlOrBlobClient, credentialOrOptions)];
- case 4:
- _a.sent();
- _a.label = 5;
- case 5:
- _i++;
- return [3 /*break*/, 1];
- case 6: return [2 /*return*/, this.submitBatch(batch)];
- }
- });
- });
- };
- BlobBatchClient.prototype.setBlobsAccessTier = function (urlsOrBlobClients, credentialOrTier, tierOrOptions, options) {
- return tslib.__awaiter(this, void 0, void 0, function () {
- var batch, _i, urlsOrBlobClients_2, urlOrBlobClient;
- return tslib.__generator(this, function (_a) {
- switch (_a.label) {
- case 0:
- batch = new BlobBatch();
- _i = 0, urlsOrBlobClients_2 = urlsOrBlobClients;
- _a.label = 1;
- case 1:
- if (!(_i < urlsOrBlobClients_2.length)) return [3 /*break*/, 6];
- urlOrBlobClient = urlsOrBlobClients_2[_i];
- if (!(typeof urlOrBlobClient === "string")) return [3 /*break*/, 3];
- return [4 /*yield*/, batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options)];
+ _b.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.containerContext.getAccessPolicy({
+ abortSignal: options.abortSignal,
+ leaseAccessConditions: options.conditions,
+ spanOptions: spanOptions
+ })];
case 2:
- _a.sent();
- return [3 /*break*/, 5];
- case 3: return [4 /*yield*/, batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions)];
+ response = _b.sent();
+ res = {
+ _response: response._response,
+ blobPublicAccess: response.blobPublicAccess,
+ date: response.date,
+ etag: response.etag,
+ errorCode: response.errorCode,
+ lastModified: response.lastModified,
+ requestId: response.requestId,
+ clientRequestId: response.clientRequestId,
+ signedIdentifiers: [],
+ version: response.version
+ };
+ for (_i = 0, response_1 = response; _i < response_1.length; _i++) {
+ identifier = response_1[_i];
+ accessPolicy = undefined;
+ if (identifier.accessPolicy) {
+ accessPolicy = {
+ permissions: identifier.accessPolicy.permissions
+ };
+ if (identifier.accessPolicy.expiresOn) {
+ accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn);
+ }
+ if (identifier.accessPolicy.startsOn) {
+ accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn);
+ }
+ }
+ res.signedIdentifiers.push({
+ accessPolicy: accessPolicy,
+ id: identifier.id
+ });
+ }
+ return [2 /*return*/, res];
+ case 3:
+ e_57 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_57.message
+ });
+ throw e_57;
case 4:
- _a.sent();
- _a.label = 5;
- case 5:
- _i++;
- return [3 /*break*/, 1];
- case 6: return [2 /*return*/, this.submitBatch(batch)];
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
}
});
});
};
/**
- * Submit batch request which consists of multiple subrequests.
- *
- * Get `blobBatchClient` and other details before running the snippets.
- * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient`
- *
- * Example usage:
- *
- * ```js
- * let batchRequest = new BlobBatch();
- * await batchRequest.deleteBlob(urlInString0, credential0);
- * await batchRequest.deleteBlob(urlInString1, credential1, {
- * deleteSnapshots: "include"
- * });
- * const batchResp = await blobBatchClient.submitBatch(batchRequest);
- * console.log(batchResp.subResponsesSucceededCount);
- * ```
- *
- * Example using a lease:
+ * Sets the permissions for the specified container. The permissions indicate
+ * whether blobs in a container may be accessed publicly.
*
- * ```js
- * let batchRequest = new BlobBatch();
- * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool");
- * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", {
- * conditions: { leaseId: leaseId }
- * });
- * const batchResp = await blobBatchClient.submitBatch(batchRequest);
- * console.log(batchResp.subResponsesSucceededCount);
- * ```
+ * When you set permissions for a container, the existing permissions are replaced.
+ * If no access or containerAcl provided, the existing container ACL will be
+ * removed.
*
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch
+ * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect.
+ * During this interval, a shared access signature that is associated with the stored access policy will
+ * fail with status code 403 (Forbidden), until the access policy becomes active.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl
*
- * @param {BlobBatch} batchRequest A set of Delete or SetTier operations.
- * @param {BlobBatchSubmitBatchOptionalParams} [options]
- * @returns {Promise}
- * @memberof BlobBatchClient
+ * @param {PublicAccessType} [access] The level of public access to data in the container.
+ * @param {SignedIdentifier[]} [containerAcl] Array of elements each having a unique Id and details of the access policy.
+ * @param {ContainerSetAccessPolicyOptions} [options] Options to Container Set Access Policy operation.
+ * @returns {Promise}
+ * @memberof ContainerClient
*/
- BlobBatchClient.prototype.submitBatch = function (batchRequest, options) {
+ ContainerClient.prototype.setAccessPolicy = function (access, containerAcl, options) {
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, batchRequestBody, rawBatchResponse, batchResponseParser, responseSummary, res, e_1;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
+ var _a, span, spanOptions, acl, _i, _b, identifier, e_58;
+ return tslib.__generator(this, function (_c) {
+ switch (_c.label) {
case 0:
- if (!batchRequest || batchRequest.getSubRequests().size == 0) {
- throw new RangeError("Batch request should contain one or more sub requests.");
- }
- _a = createSpan("BlobBatchClient-submitBatch", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
+ options.conditions = options.conditions || {};
+ _a = createSpan("ContainerClient-setAccessPolicy", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _c.label = 1;
case 1:
- _b.trys.push([1, 4, 5, 6]);
- batchRequestBody = batchRequest.getHttpRequestBody();
- return [4 /*yield*/, this._serviceContext.submitBatch(batchRequestBody, utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), tslib.__assign(tslib.__assign({}, options), { spanOptions: spanOptions }))];
- case 2:
- rawBatchResponse = _b.sent();
- batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests());
- return [4 /*yield*/, batchResponseParser.parseBatchResponse()];
+ _c.trys.push([1, 3, 4, 5]);
+ acl = [];
+ for (_i = 0, _b = containerAcl || []; _i < _b.length; _i++) {
+ identifier = _b[_i];
+ acl.push({
+ accessPolicy: {
+ expiresOn: identifier.accessPolicy.expiresOn
+ ? truncatedISO8061Date(identifier.accessPolicy.expiresOn)
+ : "",
+ permissions: identifier.accessPolicy.permissions,
+ startsOn: identifier.accessPolicy.startsOn
+ ? truncatedISO8061Date(identifier.accessPolicy.startsOn)
+ : ""
+ },
+ id: identifier.id
+ });
+ }
+ return [4 /*yield*/, this.containerContext.setAccessPolicy({
+ abortSignal: options.abortSignal,
+ access: access,
+ containerAcl: acl,
+ leaseAccessConditions: options.conditions,
+ modifiedAccessConditions: options.conditions,
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _c.sent()];
case 3:
- responseSummary = _b.sent();
- res = {
- _response: rawBatchResponse._response,
- contentType: rawBatchResponse.contentType,
- errorCode: rawBatchResponse.errorCode,
- requestId: rawBatchResponse.requestId,
- clientRequestId: rawBatchResponse.clientRequestId,
- version: rawBatchResponse.version,
- subResponses: responseSummary.subResponses,
- subResponsesSucceededCount: responseSummary.subResponsesSucceededCount,
- subResponsesFailedCount: responseSummary.subResponsesFailedCount
- };
- return [2 /*return*/, res];
- case 4:
- e_1 = _b.sent();
+ e_58 = _c.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_1.message
+ message: e_58.message
});
- throw e_1;
- case 5:
+ throw e_58;
+ case 4:
span.end();
return [7 /*endfinally*/];
- case 6: return [2 /*return*/];
+ case 5: return [2 /*return*/];
}
});
});
};
- return BlobBatchClient;
-}());
-
-/**
- * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you
- * to manipulate blob containers.
- *
- * @export
- * @class BlobServiceClient
- */
-var BlobServiceClient = /** @class */ (function (_super) {
- tslib.__extends(BlobServiceClient, _super);
- function BlobServiceClient(url, credentialOrPipeline, options) {
- var _this = this;
- var pipeline;
- if (credentialOrPipeline instanceof Pipeline) {
- pipeline = credentialOrPipeline;
- }
- else if ((coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) ||
- credentialOrPipeline instanceof AnonymousCredential ||
- coreHttp.isTokenCredential(credentialOrPipeline)) {
- pipeline = newPipeline(credentialOrPipeline, options);
- }
- else {
- // The second parameter is undefined. Use anonymous credential
- pipeline = newPipeline(new AnonymousCredential(), options);
- }
- _this = _super.call(this, url, pipeline) || this;
- _this.serviceContext = new Service(_this.storageClientContext);
- return _this;
- }
/**
+ * Get a {@link BlobLeaseClient} that manages leases on the container.
*
- * Creates an instance of BlobServiceClient from connection string.
- *
- * @param {string} connectionString Account connection string or a SAS connection string of an Azure storage account.
- * [ Note - Account connection string can only be used in NODE.JS runtime. ]
- * Account connection string example -
- * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`
- * SAS connection string example -
- * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`
- * @param {StoragePipelineOptions} [options] Optional. Options to configure the HTTP pipeline.
- * @memberof BlobServiceClient
+ * @param {string} [proposeLeaseId] Initial proposed lease Id.
+ * @returns {BlobLeaseClient} A new BlobLeaseClient object for managing leases on the container.
+ * @memberof ContainerClient
*/
- BlobServiceClient.fromConnectionString = function (connectionString, options) {
- options = options || {};
- var extractedCreds = extractConnectionStringParts(connectionString);
- if (extractedCreds.kind === "AccountConnString") {
- {
- var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
- options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);
- var pipeline = newPipeline(sharedKeyCredential, options);
- return new BlobServiceClient(extractedCreds.url, pipeline);
- }
- }
- else if (extractedCreds.kind === "SASConnString") {
- var pipeline = newPipeline(new AnonymousCredential(), options);
- return new BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline);
- }
- else {
- throw new Error("Connection string must be either an Account connection string or a SAS connection string");
- }
+ ContainerClient.prototype.getBlobLeaseClient = function (proposeLeaseId) {
+ return new BlobLeaseClient(this, proposeLeaseId);
};
/**
- * Creates a {@link ContainerClient} object
+ * Creates a new block blob, or updates the content of an existing block blob.
*
- * @param {string} containerName A container name
- * @returns {ContainerClient} A new ContainerClient object for the given container name.
- * @memberof BlobServiceClient
+ * Updating an existing block blob overwrites any existing metadata on the blob.
+ * Partial updates are not supported; the content of the existing blob is
+ * overwritten with the new content. To perform a partial update of a block blob's,
+ * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}.
*
- * Example usage:
+ * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile},
+ * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better
+ * performance with concurrency uploading.
*
- * ```js
- * const containerClient = blobServiceClient.getContainerClient("");
- * ```
- */
- BlobServiceClient.prototype.getContainerClient = function (containerName) {
- return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline);
- };
- /**
- * Create a Blob container.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
*
- * @param {string} containerName Name of the container to create.
- * @param {ContainerCreateOptions} [options] Options to configure Container Create operation.
- * @returns {Promise<{ containerClient: ContainerClient; containerCreateResponse: ContainerCreateResponse }>} Container creation response and the corresponding container client.
- * @memberof BlobServiceClient
+ * @param {string} blobName Name of the block blob to create or update.
+ * @param {HttpRequestBody} body Blob, string, ArrayBuffer, ArrayBufferView or a function
+ * which returns a new Readable stream whose offset is from data source beginning.
+ * @param {number} contentLength Length of body in bytes. Use Buffer.byteLength() to calculate body length for a
+ * string including non non-Base64/Hex-encoded characters.
+ * @param {BlockBlobUploadOptions} [options] Options to configure the Block Blob Upload operation.
+ * @returns {Promise<{ blockBlobClient: BlockBlobClient; response: BlockBlobUploadResponse }>} Block Blob upload response data and the corresponding BlockBlobClient instance.
+ * @memberof ContainerClient
*/
- BlobServiceClient.prototype.createContainer = function (containerName, options) {
+ ContainerClient.prototype.uploadBlockBlob = function (blobName, body, contentLength, options) {
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, containerClient, containerCreateResponse, e_1;
+ var _a, span, spanOptions, blockBlobClient, response, e_59;
return tslib.__generator(this, function (_b) {
switch (_b.label) {
case 0:
- _a = createSpan("BlobServiceClient-createContainer", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _a = createSpan("ContainerClient-uploadBlockBlob", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
_b.label = 1;
case 1:
_b.trys.push([1, 3, 4, 5]);
- containerClient = this.getContainerClient(containerName);
- return [4 /*yield*/, containerClient.create(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ blockBlobClient = this.getBlockBlobClient(blobName);
+ return [4 /*yield*/, blockBlobClient.upload(body, contentLength, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
case 2:
- containerCreateResponse = _b.sent();
+ response = _b.sent();
return [2 /*return*/, {
- containerClient: containerClient,
- containerCreateResponse: containerCreateResponse
+ blockBlobClient: blockBlobClient,
+ response: response
}];
case 3:
- e_1 = _b.sent();
+ e_59 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_1.message
+ message: e_59.message
});
- throw e_1;
+ throw e_59;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -53208,34 +53264,41 @@ var BlobServiceClient = /** @class */ (function (_super) {
});
};
/**
- * Deletes a Blob container.
+ * Marks the specified blob or snapshot for deletion. The blob is later deleted
+ * during garbage collection. Note that in order to delete a blob, you must delete
+ * all of its snapshots. You can delete both at the same time with the Delete
+ * Blob operation.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
*
- * @param {string} containerName Name of the container to delete.
- * @param {ContainerDeleteMethodOptions} [options] Options to configure Container Delete operation.
- * @returns {Promise} Container deletion response.
- * @memberof BlobServiceClient
+ * @param {string} blobName
+ * @param {ContainerDeleteBlobOptions} [options] Options to Blob Delete operation.
+ * @returns {Promise} Block blob deletion response data.
+ * @memberof ContainerClient
*/
- BlobServiceClient.prototype.deleteContainer = function (containerName, options) {
+ ContainerClient.prototype.deleteBlob = function (blobName, options) {
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, containerClient, e_2;
+ var _a, span, spanOptions, blobClient, e_60;
return tslib.__generator(this, function (_b) {
switch (_b.label) {
case 0:
- _a = createSpan("BlobServiceClient-deleteContainer", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _a = createSpan("ContainerClient-deleteBlob", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
_b.label = 1;
case 1:
_b.trys.push([1, 3, 4, 5]);
- containerClient = this.getContainerClient(containerName);
- return [4 /*yield*/, containerClient.delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ blobClient = this.getBlobClient(blobName);
+ if (options.versionId) {
+ blobClient = blobClient.withVersion(options.versionId);
+ }
+ return [4 /*yield*/, blobClient.delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
case 2: return [2 /*return*/, _b.sent()];
case 3:
- e_2 = _b.sent();
+ e_60 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_2.message
+ message: e_60.message
});
- throw e_2;
+ throw e_60;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -53245,37 +53308,43 @@ var BlobServiceClient = /** @class */ (function (_super) {
});
};
/**
- * Gets the properties of a storage account’s Blob service, including properties
- * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties
+ * listBlobFlatSegment returns a single segment of blobs starting from the
+ * specified Marker. Use an empty Marker to start enumeration from the beginning.
+ * After getting a segment, process it, and then call listBlobsFlatSegment again
+ * (passing the the previously-returned Marker) to get the next segment.
+ * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs
*
- * @param {ServiceGetPropertiesOptions} [options] Options to the Service Get Properties operation.
- * @returns {Promise} Response data for the Service Get Properties operation.
- * @memberof BlobServiceClient
+ * @param {string} [marker] A string value that identifies the portion of the list to be returned with the next list operation.
+ * @param {ContainerListBlobsSegmentOptions} [options] Options to Container List Blob Flat Segment operation.
+ * @returns {Promise}
+ * @memberof ContainerClient
*/
- BlobServiceClient.prototype.getProperties = function (options) {
+ ContainerClient.prototype.listBlobFlatSegment = function (marker, options) {
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_3;
+ var _a, span, spanOptions, response, wrappedResponse, e_61;
return tslib.__generator(this, function (_b) {
switch (_b.label) {
case 0:
- _a = createSpan("BlobServiceClient-getProperties", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _a = createSpan("ContainerClient-listBlobFlatSegment", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
_b.label = 1;
case 1:
_b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.serviceContext.getProperties({
- abortSignal: options.abortSignal,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _b.sent()];
+ return [4 /*yield*/, this.containerContext.listBlobFlatSegment(tslib.__assign(tslib.__assign({ marker: marker }, options), { spanOptions: spanOptions }))];
+ case 2:
+ response = _b.sent();
+ wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, segment: tslib.__assign(tslib.__assign({}, response.segment), { blobItems: response.segment.blobItems.map(function (blobItemInteral) {
+ var blobItem = tslib.__assign(tslib.__assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) });
+ return blobItem;
+ }) }) });
+ return [2 /*return*/, wrappedResponse];
case 3:
- e_3 = _b.sent();
+ e_61 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_3.message
+ message: e_61.message
});
- throw e_3;
+ throw e_61;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -53285,219 +53354,44 @@ var BlobServiceClient = /** @class */ (function (_super) {
});
};
/**
- * Sets properties for a storage account’s Blob service endpoint, including properties
- * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties}
+ * listBlobHierarchySegment returns a single segment of blobs starting from
+ * the specified Marker. Use an empty Marker to start enumeration from the
+ * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment
+ * again (passing the the previously-returned Marker) to get the next segment.
+ * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs
*
- * @param {BlobServiceProperties} properties
- * @param {ServiceSetPropertiesOptions} [options] Options to the Service Set Properties operation.
- * @returns {Promise} Response data for the Service Set Properties operation.
- * @memberof BlobServiceClient
+ * @param {string} delimiter The character or string used to define the virtual hierarchy
+ * @param {string} [marker] A string value that identifies the portion of the list to be returned with the next list operation.
+ * @param {ContainerListBlobsSegmentOptions} [options] Options to Container List Blob Hierarchy Segment operation.
+ * @returns {Promise}
+ * @memberof ContainerClient
*/
- BlobServiceClient.prototype.setProperties = function (properties, options) {
+ ContainerClient.prototype.listBlobHierarchySegment = function (delimiter, marker, options) {
if (options === void 0) { options = {}; }
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_4;
+ var _a, span, spanOptions, response, wrappedResponse, e_62;
return tslib.__generator(this, function (_b) {
switch (_b.label) {
case 0:
- _a = createSpan("BlobServiceClient-setProperties", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _a = createSpan("ContainerClient-listBlobHierarchySegment", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
_b.label = 1;
case 1:
_b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.serviceContext.setProperties(properties, {
- abortSignal: options.abortSignal,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _b.sent()];
- case 3:
- e_4 = _b.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_4.message
- });
- throw e_4;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
- };
- /**
- * Retrieves statistics related to replication for the Blob service. It is only
- * available on the secondary location endpoint when read-access geo-redundant
- * replication is enabled for the storage account.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats}
- *
- * @param {ServiceGetStatisticsOptions} [options] Options to the Service Get Statistics operation.
- * @returns {Promise} Response data for the Service Get Statistics operation.
- * @memberof BlobServiceClient
- */
- BlobServiceClient.prototype.getStatistics = function (options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_5;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
- case 0:
- _a = createSpan("BlobServiceClient-getStatistics", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
- case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.serviceContext.getStatistics({
- abortSignal: options.abortSignal,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _b.sent()];
- case 3:
- e_5 = _b.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_5.message
- });
- throw e_5;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
- };
- /**
- * The Get Account Information operation returns the sku name and account kind
- * for the specified account.
- * The Get Account Information operation is available on service versions beginning
- * with version 2018-03-28.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information
- *
- * @param {ServiceGetAccountInfoOptions} [options] Options to the Service Get Account Info operation.
- * @returns {Promise} Response data for the Service Get Account Info operation.
- * @memberof BlobServiceClient
- */
- BlobServiceClient.prototype.getAccountInfo = function (options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_6;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
- case 0:
- _a = createSpan("BlobServiceClient-getAccountInfo", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
- case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.serviceContext.getAccountInfo({
- abortSignal: options.abortSignal,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _b.sent()];
- case 3:
- e_6 = _b.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_6.message
- });
- throw e_6;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
- };
- /**
- * Returns a list of the containers under the specified account.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2
- *
- * @param {string} [marker] A string value that identifies the portion of
- * the list of containers to be returned with the next listing operation. The
- * operation returns the NextMarker value within the response body if the
- * listing operation did not return all containers remaining to be listed
- * with the current page. The NextMarker value can be used as the value for
- * the marker parameter in a subsequent call to request the next page of list
- * items. The marker value is opaque to the client.
- * @param {ServiceListContainersSegmentOptions} [options] Options to the Service List Container Segment operation.
- * @returns {Promise} Response data for the Service List Container Segment operation.
- * @memberof BlobServiceClient
- */
- BlobServiceClient.prototype.listContainersSegment = function (marker, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_7;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
- case 0:
- _a = createSpan("BlobServiceClient-listContainersSegment", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
- case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.serviceContext.listContainersSegment(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal, marker: marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include, spanOptions: spanOptions }))];
- case 2: return [2 /*return*/, _b.sent()];
- case 3:
- e_7 = _b.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_7.message
- });
- throw e_7;
- case 4:
- span.end();
- return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
- }
- });
- });
- };
- /**
- * The Filter Blobs operation enables callers to list blobs across all containers whose tags
- * match a given search expression. Filter blobs searches across all containers within a
- * storage account but can be scoped within the expression to a single container.
- *
- * @private
- * @param {string} tagFilterSqlExpression The where parameter enables the caller to query blobs whose tags match a given expression.
- * The given expression must evaluate to true for a blob to be returned in the results.
- * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
- * however, only a subset of the OData filter syntax is supported in the Blob service.
- * @param {string} [marker] A string value that identifies the portion of
- * the list of blobs to be returned with the next listing operation. The
- * operation returns the NextMarker value within the response body if the
- * listing operation did not return all blobs remaining to be listed
- * with the current page. The NextMarker value can be used as the value for
- * the marker parameter in a subsequent call to request the next page of list
- * items. The marker value is opaque to the client.
- * @param {ServiceFindBlobsByTagsSegmentOptions} [options={}] Options to find blobs by tags.
- * @returns {Promise}
- * @memberof BlobServiceClient
- */
- BlobServiceClient.prototype.findBlobsByTagsSegment = function (tagFilterSqlExpression, marker, options) {
- if (options === void 0) { options = {}; }
- return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, e_8;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
- case 0:
- _a = createSpan("BlobServiceClient-findBlobsByTagsSegment", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
- case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.serviceContext.filterBlobs({
- abortSignal: options.abortSignal,
- where: tagFilterSqlExpression,
- marker: marker,
- maxPageSize: options.maxPageSize,
- spanOptions: spanOptions
- })];
- case 2: return [2 /*return*/, _b.sent()];
+ return [4 /*yield*/, this.containerContext.listBlobHierarchySegment(delimiter, tslib.__assign(tslib.__assign({ marker: marker }, options), { spanOptions: spanOptions }))];
+ case 2:
+ response = _b.sent();
+ wrappedResponse = tslib.__assign(tslib.__assign({}, response), { _response: response._response, segment: tslib.__assign(tslib.__assign({}, response.segment), { blobItems: response.segment.blobItems.map(function (blobItemInteral) {
+ var blobItem = tslib.__assign(tslib.__assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) });
+ return blobItem;
+ }) }) });
+ return [2 /*return*/, wrappedResponse];
case 3:
- e_8 = _b.sent();
+ e_62 = _b.sent();
span.setStatus({
code: api.CanonicalCode.UNKNOWN,
- message: e_8.message
+ message: e_62.message
});
- throw e_8;
+ throw e_62;
case 4:
span.end();
return [7 /*endfinally*/];
@@ -53507,79 +53401,71 @@ var BlobServiceClient = /** @class */ (function (_super) {
});
};
/**
- * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse.
+ * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse
*
* @private
- * @param {string} tagFilterSqlExpression The where parameter enables the caller to query blobs whose tags match a given expression.
- * The given expression must evaluate to true for a blob to be returned in the results.
- * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
- * however, only a subset of the OData filter syntax is supported in the Blob service.
* @param {string} [marker] A string value that identifies the portion of
* the list of blobs to be returned with the next listing operation. The
- * operation returns the NextMarker value within the response body if the
+ * operation returns the ContinuationToken value within the response body if the
* listing operation did not return all blobs remaining to be listed
- * with the current page. The NextMarker value can be used as the value for
+ * with the current page. The ContinuationToken value can be used as the value for
* the marker parameter in a subsequent call to request the next page of list
* items. The marker value is opaque to the client.
- * @param {ServiceFindBlobsByTagsSegmentOptions} [options={}] Options to find blobs by tags.
- * @returns {AsyncIterableIterator}
- * @memberof BlobServiceClient
+ * @param {ContainerListBlobsSegmentOptions} [options] Options to list blobs operation.
+ * @returns {AsyncIterableIterator}
+ * @memberof ContainerClient
*/
- BlobServiceClient.prototype.findBlobsByTagsSegments = function (tagFilterSqlExpression, marker, options) {
+ ContainerClient.prototype.listSegments = function (marker, options) {
if (options === void 0) { options = {}; }
- return tslib.__asyncGenerator(this, arguments, function findBlobsByTagsSegments_1() {
- var response;
+ return tslib.__asyncGenerator(this, arguments, function listSegments_1() {
+ var listBlobsFlatSegmentResponse;
return tslib.__generator(this, function (_a) {
switch (_a.label) {
case 0:
- if (!(!!marker || marker === undefined)) return [3 /*break*/, 6];
+ if (!(!!marker || marker === undefined)) return [3 /*break*/, 7];
_a.label = 1;
- case 1: return [4 /*yield*/, tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options))];
+ case 1: return [4 /*yield*/, tslib.__await(this.listBlobFlatSegment(marker, options))];
case 2:
- response = _a.sent();
- response.blobs = response.blobs || [];
- marker = response.continuationToken;
- return [4 /*yield*/, tslib.__await(response)];
- case 3: return [4 /*yield*/, _a.sent()];
- case 4:
- _a.sent();
- _a.label = 5;
+ listBlobsFlatSegmentResponse = _a.sent();
+ marker = listBlobsFlatSegmentResponse.continuationToken;
+ return [4 /*yield*/, tslib.__await(listBlobsFlatSegmentResponse)];
+ case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])];
+ case 4: return [4 /*yield*/, _a.sent()];
case 5:
- if (marker) return [3 /*break*/, 1];
+ _a.sent();
_a.label = 6;
- case 6: return [2 /*return*/];
+ case 6:
+ if (marker) return [3 /*break*/, 1];
+ _a.label = 7;
+ case 7: return [2 /*return*/];
}
});
});
};
/**
- * Returns an AsyncIterableIterator for blobs.
+ * Returns an AsyncIterableIterator of {@link BlobItem} objects
*
* @private
- * @param {string} tagFilterSqlExpression The where parameter enables the caller to query blobs whose tags match a given expression.
- * The given expression must evaluate to true for a blob to be returned in the results.
- * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
- * however, only a subset of the OData filter syntax is supported in the Blob service.
- * @param {ServiceFindBlobsByTagsSegmentOptions} [options={}] Options to findBlobsByTagsItems.
- * @returns {AsyncIterableIterator}
- * @memberof BlobServiceClient
+ * @param {ContainerListBlobsSegmentOptions} [options] Options to list blobs operation.
+ * @returns {AsyncIterableIterator}
+ * @memberof ContainerClient
*/
- BlobServiceClient.prototype.findBlobsByTagsItems = function (tagFilterSqlExpression, options) {
+ ContainerClient.prototype.listItems = function (options) {
if (options === void 0) { options = {}; }
- return tslib.__asyncGenerator(this, arguments, function findBlobsByTagsItems_1() {
- var marker, _a, _b, segment, e_9_1;
- var e_9, _c;
+ return tslib.__asyncGenerator(this, arguments, function listItems_1() {
+ var marker, _a, _b, listBlobsFlatSegmentResponse, e_63_1;
+ var e_63, _c;
return tslib.__generator(this, function (_d) {
switch (_d.label) {
case 0:
_d.trys.push([0, 7, 8, 13]);
- _a = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options));
+ _a = tslib.__asyncValues(this.listSegments(marker, options));
_d.label = 1;
case 1: return [4 /*yield*/, tslib.__await(_a.next())];
case 2:
if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6];
- segment = _b.value;
- return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs)))];
+ listBlobsFlatSegmentResponse = _b.value;
+ return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems)))];
case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])];
case 4:
_d.sent();
@@ -53587,8 +53473,8 @@ var BlobServiceClient = /** @class */ (function (_super) {
case 5: return [3 /*break*/, 1];
case 6: return [3 /*break*/, 13];
case 7:
- e_9_1 = _d.sent();
- e_9 = { error: e_9_1 };
+ e_63_1 = _d.sent();
+ e_63 = { error: e_63_1 };
return [3 /*break*/, 13];
case 8:
_d.trys.push([8, , 11, 12]);
@@ -53599,7 +53485,7 @@ var BlobServiceClient = /** @class */ (function (_super) {
_d.label = 10;
case 10: return [3 /*break*/, 12];
case 11:
- if (e_9) throw e_9.error;
+ if (e_63) throw e_63.error;
return [7 /*endfinally*/];
case 12: return [7 /*endfinally*/];
case 13: return [2 /*return*/];
@@ -53608,19 +53494,19 @@ var BlobServiceClient = /** @class */ (function (_super) {
});
};
/**
- * Returns an async iterable iterator to find all blobs with specified tag
+ * Returns an async iterable iterator to list all the blobs
* under the specified account.
*
* .byPage() returns an async iterable iterator to list the blobs in pages.
*
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties
- *
* Example using `for await` syntax:
*
* ```js
+ * // Get the containerClient before you run these snippets,
+ * // Can be obtained from `blobServiceClient.getContainerClient("");`
* let i = 1;
- * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) {
- * console.log(`Blob ${i++}: ${container.name}`);
+ * for await (const blob of containerClient.listBlobsFlat()) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
* }
* ```
*
@@ -53628,7 +53514,7 @@ var BlobServiceClient = /** @class */ (function (_super) {
*
* ```js
* let i = 1;
- * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'");
+ * let iter = containerClient.listBlobsFlat();
* let blobItem = await iter.next();
* while (!blobItem.done) {
* console.log(`Blob ${i++}: ${blobItem.value.name}`);
@@ -53641,11 +53527,9 @@ var BlobServiceClient = /** @class */ (function (_super) {
* ```js
* // passing optional maxPageSize in the page settings
* let i = 1;
- * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) {
- * if (response.blobs) {
- * for (const blob of response.blobs) {
- * console.log(`Blob ${i++}: ${blob.name}`);
- * }
+ * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) {
+ * for (const blob of response.segment.blobItems) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
* }
* }
* ```
@@ -53654,47 +53538,64 @@ var BlobServiceClient = /** @class */ (function (_super) {
*
* ```js
* let i = 1;
- * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 });
+ * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 });
* let response = (await iterator.next()).value;
*
* // Prints 2 blob names
- * if (response.blobs) {
- * for (const blob of response.blobs) {
- * console.log(`Blob ${i++}: ${blob.name}`);
- * }
+ * for (const blob of response.segment.blobItems) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
* }
*
* // Gets next marker
* let marker = response.continuationToken;
+ *
* // Passing next marker as continuationToken
- * iterator = blobServiceClient
- * .findBlobsByTags("tagkey='tagvalue'")
- * .byPage({ continuationToken: marker, maxPageSize: 10 });
+ *
+ * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 });
* response = (await iterator.next()).value;
*
- * // Prints blob names
- * if (response.blobs) {
- * for (const blob of response.blobs) {
- * console.log(`Blob ${i++}: ${blob.name}`);
- * }
+ * // Prints 10 blob names
+ * for (const blob of response.segment.blobItems) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
* }
* ```
*
- * @param {string} tagFilterSqlExpression The where parameter enables the caller to query blobs whose tags match a given expression.
- * The given expression must evaluate to true for a blob to be returned in the results.
- * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
- * however, only a subset of the OData filter syntax is supported in the Blob service.
- * @param {ServiceFindBlobByTagsOptions} [options={}] Options to find blobs by tags.
- * @returns {PagedAsyncIterableIterator}
- * @memberof BlobServiceClient
+ * @param {ContainerListBlobsOptions} [options={}] Options to list blobs.
+ * @returns {PagedAsyncIterableIterator} An asyncIterableIterator that supports paging.
+ * @memberof ContainerClient
*/
- BlobServiceClient.prototype.findBlobsByTags = function (tagFilterSqlExpression, options) {
+ ContainerClient.prototype.listBlobsFlat = function (options) {
var _a;
var _this = this;
if (options === void 0) { options = {}; }
+ var include = [];
+ if (options.includeCopy) {
+ include.push("copy");
+ }
+ if (options.includeDeleted) {
+ include.push("deleted");
+ }
+ if (options.includeMetadata) {
+ include.push("metadata");
+ }
+ if (options.includeSnapshots) {
+ include.push("snapshots");
+ }
+ if (options.includeVersions) {
+ include.push("versions");
+ }
+ if (options.includeUncommitedBlobs) {
+ include.push("uncommittedblobs");
+ }
+ if (options.includeTags) {
+ include.push("tags");
+ }
+ if (options.prefix === "") {
+ options.prefix = undefined;
+ }
+ var updatedOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? { include: include } : {}));
// AsyncIterableIterator to iterate over blobs
- var listSegmentOptions = tslib.__assign({}, options);
- var iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);
+ var iter = this.listItems(updatedOptions);
return _a = {
/**
* @member {Promise} [next] The next method, part of the iteration protocol
@@ -53714,41 +53615,40 @@ var BlobServiceClient = /** @class */ (function (_super) {
*/
_a.byPage = function (settings) {
if (settings === void 0) { settings = {}; }
- return _this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions));
+ return _this.listSegments(settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, updatedOptions));
},
_a;
};
/**
- * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses
+ * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse
*
* @private
+ * @param {string} delimiter The character or string used to define the virtual hierarchy
* @param {string} [marker] A string value that identifies the portion of
- * the list of containers to be returned with the next listing operation. The
- * operation returns the NextMarker value within the response body if the
- * listing operation did not return all containers remaining to be listed
- * with the current page. The NextMarker value can be used as the value for
- * the marker parameter in a subsequent call to request the next page of list
- * items. The marker value is opaque to the client.
- * @param {ServiceListContainersSegmentOptions} [options] Options to list containers operation.
- * @returns {AsyncIterableIterator}
- * @memberof BlobServiceClient
+ * the list of blobs to be returned with the next listing operation. The
+ * operation returns the ContinuationToken value within the response body if the
+ * listing operation did not return all blobs remaining to be listed
+ * with the current page. The ContinuationToken value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of list
+ * items. The marker value is opaque to the client.
+ * @param {ContainerListBlobsSegmentOptions} [options] Options to list blobs operation.
+ * @returns {AsyncIterableIterator}
+ * @memberof ContainerClient
*/
- BlobServiceClient.prototype.listSegments = function (marker, options) {
+ ContainerClient.prototype.listHierarchySegments = function (delimiter, marker, options) {
if (options === void 0) { options = {}; }
- return tslib.__asyncGenerator(this, arguments, function listSegments_1() {
- var listContainersSegmentResponse;
+ return tslib.__asyncGenerator(this, arguments, function listHierarchySegments_1() {
+ var listBlobsHierarchySegmentResponse;
return tslib.__generator(this, function (_a) {
switch (_a.label) {
case 0:
if (!(!!marker || marker === undefined)) return [3 /*break*/, 7];
_a.label = 1;
- case 1: return [4 /*yield*/, tslib.__await(this.listContainersSegment(marker, options))];
+ case 1: return [4 /*yield*/, tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options))];
case 2:
- listContainersSegmentResponse = _a.sent();
- listContainersSegmentResponse.containerItems =
- listContainersSegmentResponse.containerItems || [];
- marker = listContainersSegmentResponse.continuationToken;
- return [4 /*yield*/, tslib.__await(listContainersSegmentResponse)];
+ listBlobsHierarchySegmentResponse = _a.sent();
+ marker = listBlobsHierarchySegmentResponse.continuationToken;
+ return [4 /*yield*/, tslib.__await(listBlobsHierarchySegmentResponse)];
case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])];
case 4: return [4 /*yield*/, _a.sent()];
case 5:
@@ -53763,147 +53663,207 @@ var BlobServiceClient = /** @class */ (function (_super) {
});
};
/**
- * Returns an AsyncIterableIterator for Container Items
+ * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects.
*
* @private
- * @param {ServiceListContainersSegmentOptions} [options] Options to list containers operation.
- * @returns {AsyncIterableIterator}
- * @memberof BlobServiceClient
+ * @param {string} delimiter The character or string used to define the virtual hierarchy
+ * @param {ContainerListBlobsSegmentOptions} [options] Options to list blobs operation.
+ * @returns {AsyncIterableIterator<{ kind: "prefix" } & BlobPrefix | { kind: "blob" } & BlobItem>}
+ * @memberof ContainerClient
*/
- BlobServiceClient.prototype.listItems = function (options) {
+ ContainerClient.prototype.listItemsByHierarchy = function (delimiter, options) {
if (options === void 0) { options = {}; }
- return tslib.__asyncGenerator(this, arguments, function listItems_1() {
- var marker, _a, _b, segment, e_10_1;
- var e_10, _c;
- return tslib.__generator(this, function (_d) {
- switch (_d.label) {
+ return tslib.__asyncGenerator(this, arguments, function listItemsByHierarchy_1() {
+ var marker, _a, _b, listBlobsHierarchySegmentResponse, segment, _i, _c, prefix, _d, _e, blob, e_64_1;
+ var e_64, _f;
+ return tslib.__generator(this, function (_g) {
+ switch (_g.label) {
case 0:
- _d.trys.push([0, 7, 8, 13]);
- _a = tslib.__asyncValues(this.listSegments(marker, options));
- _d.label = 1;
+ _g.trys.push([0, 14, 15, 20]);
+ _a = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options));
+ _g.label = 1;
case 1: return [4 /*yield*/, tslib.__await(_a.next())];
case 2:
- if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6];
- segment = _b.value;
- return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems)))];
- case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])];
- case 4:
- _d.sent();
- _d.label = 5;
- case 5: return [3 /*break*/, 1];
- case 6: return [3 /*break*/, 13];
+ if (!(_b = _g.sent(), !_b.done)) return [3 /*break*/, 13];
+ listBlobsHierarchySegmentResponse = _b.value;
+ segment = listBlobsHierarchySegmentResponse.segment;
+ if (!segment.blobPrefixes) return [3 /*break*/, 7];
+ _i = 0, _c = segment.blobPrefixes;
+ _g.label = 3;
+ case 3:
+ if (!(_i < _c.length)) return [3 /*break*/, 7];
+ prefix = _c[_i];
+ return [4 /*yield*/, tslib.__await(tslib.__assign({ kind: "prefix" }, prefix))];
+ case 4: return [4 /*yield*/, _g.sent()];
+ case 5:
+ _g.sent();
+ _g.label = 6;
+ case 6:
+ _i++;
+ return [3 /*break*/, 3];
case 7:
- e_10_1 = _d.sent();
- e_10 = { error: e_10_1 };
- return [3 /*break*/, 13];
+ _d = 0, _e = segment.blobItems;
+ _g.label = 8;
case 8:
- _d.trys.push([8, , 11, 12]);
- if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10];
- return [4 /*yield*/, tslib.__await(_c.call(_a))];
- case 9:
- _d.sent();
- _d.label = 10;
- case 10: return [3 /*break*/, 12];
+ if (!(_d < _e.length)) return [3 /*break*/, 12];
+ blob = _e[_d];
+ return [4 /*yield*/, tslib.__await(tslib.__assign({ kind: "blob" }, blob))];
+ case 9: return [4 /*yield*/, _g.sent()];
+ case 10:
+ _g.sent();
+ _g.label = 11;
case 11:
- if (e_10) throw e_10.error;
+ _d++;
+ return [3 /*break*/, 8];
+ case 12: return [3 /*break*/, 1];
+ case 13: return [3 /*break*/, 20];
+ case 14:
+ e_64_1 = _g.sent();
+ e_64 = { error: e_64_1 };
+ return [3 /*break*/, 20];
+ case 15:
+ _g.trys.push([15, , 18, 19]);
+ if (!(_b && !_b.done && (_f = _a.return))) return [3 /*break*/, 17];
+ return [4 /*yield*/, tslib.__await(_f.call(_a))];
+ case 16:
+ _g.sent();
+ _g.label = 17;
+ case 17: return [3 /*break*/, 19];
+ case 18:
+ if (e_64) throw e_64.error;
return [7 /*endfinally*/];
- case 12: return [7 /*endfinally*/];
- case 13: return [2 /*return*/];
+ case 19: return [7 /*endfinally*/];
+ case 20: return [2 /*return*/];
}
});
});
};
/**
- * Returns an async iterable iterator to list all the containers
+ * Returns an async iterable iterator to list all the blobs by hierarchy.
* under the specified account.
*
- * .byPage() returns an async iterable iterator to list the containers in pages.
+ * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages.
*
* Example using `for await` syntax:
*
* ```js
- * let i = 1;
- * for await (const container of blobServiceClient.listContainers()) {
- * console.log(`Container ${i++}: ${container.name}`);
+ * for await (const item of containerClient.listBlobsByHierarchy("/")) {
+ * if (item.kind === "prefix") {
+ * console.log(`\tBlobPrefix: ${item.name}`);
+ * } else {
+ * console.log(`\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`);
+ * }
* }
* ```
*
* Example using `iter.next()`:
*
* ```js
- * let i = 1;
- * const iter = blobServiceClient.listContainers();
- * let containerItem = await iter.next();
- * while (!containerItem.done) {
- * console.log(`Container ${i++}: ${containerItem.value.name}`);
- * containerItem = await iter.next();
- * }
- * ```
+ * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" });
+ * let entity = await iter.next();
+ * while (!entity.done) {
+ * let item = entity.value;
+ * if (item.kind === "prefix") {
+ * console.log(`\tBlobPrefix: ${item.name}`);
+ * } else {
+ * console.log(`\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`);
+ * }
+ * entity = await iter.next();
+ * }
+ * ```js
*
* Example using `byPage()`:
*
* ```js
- * // passing optional maxPageSize in the page settings
- * let i = 1;
- * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) {
- * if (response.containerItems) {
- * for (const container of response.containerItems) {
- * console.log(`Container ${i++}: ${container.name}`);
+ * console.log("Listing blobs by hierarchy by page");
+ * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) {
+ * const segment = response.segment;
+ * if (segment.blobPrefixes) {
+ * for (const prefix of segment.blobPrefixes) {
+ * console.log(`\tBlobPrefix: ${prefix.name}`);
* }
* }
+ * for (const blob of response.segment.blobItems) {
+ * console.log(`\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`);
+ * }
* }
* ```
*
- * Example using paging with a marker:
+ * Example using paging with a max page size:
*
* ```js
+ * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size");
+ *
* let i = 1;
- * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 });
- * let response = (await iterator.next()).value;
+ * for await (const response of containerClient.listBlobsByHierarchy("/", { prefix: "prefix2/sub1/"}).byPage({ maxPageSize: 2 })) {
+ * console.log(`Page ${i++}`);
+ * const segment = response.segment;
*
- * // Prints 2 container names
- * if (response.containerItems) {
- * for (const container of response.containerItems) {
- * console.log(`Container ${i++}: ${container.name}`);
+ * if (segment.blobPrefixes) {
+ * for (const prefix of segment.blobPrefixes) {
+ * console.log(`\tBlobPrefix: ${prefix.name}`);
+ * }
* }
- * }
- *
- * // Gets next marker
- * let marker = response.continuationToken;
- * // Passing next marker as continuationToken
- * iterator = blobServiceClient
- * .listContainers()
- * .byPage({ continuationToken: marker, maxPageSize: 10 });
- * response = (await iterator.next()).value;
*
- * // Prints 10 container names
- * if (response.containerItems) {
- * for (const container of response.containerItems) {
- * console.log(`Container ${i++}: ${container.name}`);
+ * for (const blob of response.segment.blobItems) {
+ * console.log(`\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`);
* }
* }
* ```
*
- * @param {ServiceListContainersOptions} [options={}] Options to list containers.
- * @returns {PagedAsyncIterableIterator} An asyncIterableIterator that supports paging.
- * @memberof BlobServiceClient
+ * @param {string} delimiter The character or string used to define the virtual hierarchy
+ * @param {ContainerListBlobsOptions} [options={}] Options to list blobs operation.
+ * @returns {(PagedAsyncIterableIterator<
+ * { kind: "prefix" } & BlobPrefix | { kind: "blob" } & BlobItem,
+ * ContainerListBlobHierarchySegmentResponse
+ * >)}
+ * @memberof ContainerClient
*/
- BlobServiceClient.prototype.listContainers = function (options) {
+ ContainerClient.prototype.listBlobsByHierarchy = function (delimiter, options) {
var _a;
var _this = this;
if (options === void 0) { options = {}; }
+ if (delimiter === "") {
+ throw new RangeError("delimiter should contain one or more characters");
+ }
+ var include = [];
+ if (options.includeCopy) {
+ include.push("copy");
+ }
+ if (options.includeDeleted) {
+ include.push("deleted");
+ }
+ if (options.includeMetadata) {
+ include.push("metadata");
+ }
+ if (options.includeSnapshots) {
+ include.push("snapshots");
+ }
+ if (options.includeVersions) {
+ include.push("versions");
+ }
+ if (options.includeUncommitedBlobs) {
+ include.push("uncommittedblobs");
+ }
+ if (options.includeTags) {
+ include.push("tags");
+ }
if (options.prefix === "") {
options.prefix = undefined;
}
- // AsyncIterableIterator to iterate over containers
- var listSegmentOptions = tslib.__assign(tslib.__assign({}, options), (options.includeMetadata ? { include: "metadata" } : {}));
- var iter = this.listItems(listSegmentOptions);
+ var updatedOptions = tslib.__assign(tslib.__assign({}, options), (include.length > 0 ? { include: include } : {}));
+ // AsyncIterableIterator to iterate over blob prefixes and blobs
+ var iter = this.listItemsByHierarchy(delimiter, updatedOptions);
return _a = {
/**
* @member {Promise} [next] The next method, part of the iteration protocol
*/
next: function () {
- return iter.next();
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ return tslib.__generator(this, function (_a) {
+ return [2 /*return*/, iter.next()];
+ });
+ });
}
},
/**
@@ -53917,9044 +53877,18791 @@ var BlobServiceClient = /** @class */ (function (_super) {
*/
_a.byPage = function (settings) {
if (settings === void 0) { settings = {}; }
- return _this.listSegments(settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions));
+ return _this.listHierarchySegments(delimiter, settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, updatedOptions));
},
_a;
};
+ ContainerClient.prototype.getContainerNameFromUrl = function () {
+ var containerName;
+ try {
+ // URL may look like the following
+ // "https://myaccount.blob.core.windows.net/mycontainer?sasString";
+ // "https://myaccount.blob.core.windows.net/mycontainer";
+ // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername`
+ // http://localhost:10001/devstoreaccount1/containername
+ var parsedUrl = coreHttp.URLBuilder.parse(this.url);
+ if (parsedUrl.getHost().split(".")[1] === "blob") {
+ // "https://myaccount.blob.core.windows.net/containername".
+ // "https://customdomain.com/containername".
+ // .getPath() -> /containername
+ containerName = parsedUrl.getPath().split("/")[1];
+ }
+ else if (isIpEndpointStyle(parsedUrl)) {
+ // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername
+ // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername
+ // .getPath() -> /devstoreaccount1/containername
+ containerName = parsedUrl.getPath().split("/")[2];
+ }
+ else {
+ // "https://customdomain.com/containername".
+ // .getPath() -> /containername
+ containerName = parsedUrl.getPath().split("/")[1];
+ }
+ // decode the encoded containerName - to get all the special characters that might be present in it
+ containerName = decodeURIComponent(containerName);
+ if (!containerName) {
+ throw new Error("Provided containerName is invalid.");
+ }
+ return containerName;
+ }
+ catch (error) {
+ throw new Error("Unable to extract containerName with provided information.");
+ }
+ };
+ return ContainerClient;
+}(StorageClient));
+
+function getBodyAsText(batchResponse) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var buffer, responseLength;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES);
+ return [4 /*yield*/, streamToBuffer2(batchResponse.readableStreamBody, buffer)];
+ case 1:
+ responseLength = _a.sent();
+ // Slice the buffer to trim the empty ending.
+ buffer = buffer.slice(0, responseLength);
+ return [2 /*return*/, buffer.toString()];
+ }
+ });
+ });
+}
+function utf8ByteLength(str) {
+ return Buffer.byteLength(str);
+}
+
+var HTTP_HEADER_DELIMITER = ": ";
+var SPACE_DELIMITER = " ";
+var NOT_FOUND = -1;
+/**
+ * Util class for parsing batch response.
+ */
+var BatchResponseParser = /** @class */ (function () {
+ function BatchResponseParser(batchResponse, subRequests) {
+ if (!batchResponse || !batchResponse.contentType) {
+ // In special case(reported), server may return invalid content-type which could not be parsed.
+ throw new RangeError("batchResponse is malformed or doesn't contain valid content-type.");
+ }
+ if (!subRequests || subRequests.size === 0) {
+ // This should be prevent during coding.
+ throw new RangeError("Invalid state: subRequests is not provided or size is 0.");
+ }
+ this.batchResponse = batchResponse;
+ this.subRequests = subRequests;
+ this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1];
+ this.perResponsePrefix = "--" + this.responseBatchBoundary + HTTP_LINE_ENDING;
+ this.batchResponseEnding = "--" + this.responseBatchBoundary + "--";
+ }
+ // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response
+ BatchResponseParser.prototype.parseBatchResponse = function () {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var responseBodyAsText, subResponses, subResponseCount, deserializedSubResponses, subResponsesSucceededCount, subResponsesFailedCount, index, subResponse, deserializedSubResponse, responseLines, subRespHeaderStartFound, subRespHeaderEndFound, subRespFailed, contentId, _i, responseLines_1, responseLine, tokens, tokens;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse
+ // sub request's response.
+ if (this.batchResponse._response.status != HTTPURLConnection.HTTP_ACCEPTED) {
+ throw new Error("Invalid state: batch request failed with status: '" + this.batchResponse._response.status + "'.");
+ }
+ return [4 /*yield*/, getBodyAsText(this.batchResponse)];
+ case 1:
+ responseBodyAsText = _a.sent();
+ subResponses = responseBodyAsText
+ .split(this.batchResponseEnding)[0] // string after ending is useless
+ .split(this.perResponsePrefix)
+ .slice(1);
+ subResponseCount = subResponses.length;
+ // Defensive coding in case of potential error parsing.
+ // Note: subResponseCount == 1 is special case where sub request is invalid.
+ // We try to prevent such cases through early validation, e.g. validate sub request count >= 1.
+ // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user.
+ if (subResponseCount != this.subRequests.size && subResponseCount != 1) {
+ throw new Error("Invalid state: sub responses' count is not equal to sub requests' count.");
+ }
+ deserializedSubResponses = new Array(subResponseCount);
+ subResponsesSucceededCount = 0;
+ subResponsesFailedCount = 0;
+ // Parse sub subResponses.
+ for (index = 0; index < subResponseCount; index++) {
+ subResponse = subResponses[index];
+ deserializedSubResponses[index] = {};
+ deserializedSubResponse = deserializedSubResponses[index];
+ deserializedSubResponse.headers = new coreHttp.HttpHeaders();
+ responseLines = subResponse.split("" + HTTP_LINE_ENDING);
+ subRespHeaderStartFound = false;
+ subRespHeaderEndFound = false;
+ subRespFailed = false;
+ contentId = NOT_FOUND;
+ for (_i = 0, responseLines_1 = responseLines; _i < responseLines_1.length; _i++) {
+ responseLine = responseLines_1[_i];
+ if (!subRespHeaderStartFound) {
+ // Convention line to indicate content ID
+ if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) {
+ contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]);
+ }
+ // Http version line with status code indicates the start of sub request's response.
+ // Example: HTTP/1.1 202 Accepted
+ if (responseLine.startsWith(HTTP_VERSION_1_1)) {
+ subRespHeaderStartFound = true;
+ tokens = responseLine.split(SPACE_DELIMITER);
+ deserializedSubResponse.status = parseInt(tokens[1]);
+ deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER);
+ }
+ continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: *
+ }
+ if (responseLine.trim() === "") {
+ // Sub response's header start line already found, and the first empty line indicates header end line found.
+ if (!subRespHeaderEndFound) {
+ subRespHeaderEndFound = true;
+ }
+ continue; // Skip empty line
+ }
+ // Note: when code reach here, it indicates subRespHeaderStartFound == true
+ if (!subRespHeaderEndFound) {
+ if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) {
+ // Defensive coding to prevent from missing valuable lines.
+ throw new Error("Invalid state: find non-empty line '" + responseLine + "' without HTTP header delimiter '" + HTTP_HEADER_DELIMITER + "'.");
+ }
+ tokens = responseLine.split(HTTP_HEADER_DELIMITER);
+ deserializedSubResponse.headers.set(tokens[0], tokens[1]);
+ if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) {
+ deserializedSubResponse.errorCode = tokens[1];
+ subRespFailed = true;
+ }
+ }
+ else {
+ // Assemble body of sub response.
+ if (!deserializedSubResponse.bodyAsText) {
+ deserializedSubResponse.bodyAsText = "";
+ }
+ deserializedSubResponse.bodyAsText += responseLine;
+ }
+ } // Inner for end
+ if (contentId != NOT_FOUND) {
+ deserializedSubResponse._request = this.subRequests.get(contentId);
+ }
+ if (subRespFailed) {
+ subResponsesFailedCount++;
+ }
+ else {
+ subResponsesSucceededCount++;
+ }
+ }
+ return [2 /*return*/, {
+ subResponses: deserializedSubResponses,
+ subResponsesSucceededCount: subResponsesSucceededCount,
+ subResponsesFailedCount: subResponsesFailedCount
+ }];
+ }
+ });
+ });
+ };
+ return BatchResponseParser;
+}());
+
+var MutexLockStatus;
+(function (MutexLockStatus) {
+ MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED";
+ MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED";
+})(MutexLockStatus || (MutexLockStatus = {}));
+/**
+ * An async mutex lock.
+ *
+ * @export
+ * @class Mutex
+ */
+var Mutex = /** @class */ (function () {
+ function Mutex() {
+ }
/**
- * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential).
- *
- * Retrieves a user delegation key for the Blob service. This is only a valid operation when using
- * bearer token authentication.
+ * Lock for a specific key. If the lock has been acquired by another customer, then
+ * will wait until getting the lock.
*
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key
+ * @static
+ * @param {string} key lock key
+ * @returns {Promise}
+ * @memberof Mutex
+ */
+ Mutex.lock = function (key) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _this = this;
+ return tslib.__generator(this, function (_a) {
+ return [2 /*return*/, new Promise(function (resolve) {
+ if (_this.keys[key] === undefined || _this.keys[key] === MutexLockStatus.UNLOCKED) {
+ _this.keys[key] = MutexLockStatus.LOCKED;
+ resolve();
+ }
+ else {
+ _this.onUnlockEvent(key, function () {
+ _this.keys[key] = MutexLockStatus.LOCKED;
+ resolve();
+ });
+ }
+ })];
+ });
+ });
+ };
+ /**
+ * Unlock a key.
*
- * @param {Date} startsOn The start time for the user delegation SAS. Must be within 7 days of the current time
- * @param {Date} expiresOn The end time for the user delegation SAS. Must be within 7 days of the current time
- * @returns {Promise}
- * @memberof BlobServiceClient
+ * @static
+ * @param {string} key
+ * @returns {Promise}
+ * @memberof Mutex
*/
- BlobServiceClient.prototype.getUserDelegationKey = function (startsOn, expiresOn, options) {
- if (options === void 0) { options = {}; }
+ Mutex.unlock = function (key) {
return tslib.__awaiter(this, void 0, void 0, function () {
- var _a, span, spanOptions, response, userDelegationKey, res, e_11;
- return tslib.__generator(this, function (_b) {
- switch (_b.label) {
- case 0:
- _a = createSpan("BlobServiceClient-getUserDelegationKey", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
- _b.label = 1;
+ var _this = this;
+ return tslib.__generator(this, function (_a) {
+ return [2 /*return*/, new Promise(function (resolve) {
+ if (_this.keys[key] === MutexLockStatus.LOCKED) {
+ _this.emitUnlockEvent(key);
+ }
+ delete _this.keys[key];
+ resolve();
+ })];
+ });
+ });
+ };
+ Mutex.onUnlockEvent = function (key, handler) {
+ if (this.listeners[key] === undefined) {
+ this.listeners[key] = [handler];
+ }
+ else {
+ this.listeners[key].push(handler);
+ }
+ };
+ Mutex.emitUnlockEvent = function (key) {
+ var _this = this;
+ if (this.listeners[key] !== undefined && this.listeners[key].length > 0) {
+ var handler_1 = this.listeners[key].shift();
+ setImmediate(function () {
+ handler_1.call(_this);
+ });
+ }
+ };
+ Mutex.keys = {};
+ Mutex.listeners = {};
+ return Mutex;
+}());
+
+/**
+ * A BlobBatch represents an aggregated set of operations on blobs.
+ * Currently, only `delete` and `setAccessTier` are supported.
+ *
+ * @export
+ * @class BlobBatch
+ */
+var BlobBatch = /** @class */ (function () {
+ function BlobBatch() {
+ this.batch = "batch";
+ this.batchRequest = new InnerBatchRequest();
+ }
+ /**
+ * Get the value of Content-Type for a batch request.
+ * The value must be multipart/mixed with a batch boundary.
+ * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252
+ */
+ BlobBatch.prototype.getMultiPartContentType = function () {
+ return this.batchRequest.getMultipartContentType();
+ };
+ /**
+ * Get assembled HTTP request body for sub requests.
+ */
+ BlobBatch.prototype.getHttpRequestBody = function () {
+ return this.batchRequest.getHttpRequestBody();
+ };
+ /**
+ * Get sub requests that are added into the batch request.
+ */
+ BlobBatch.prototype.getSubRequests = function () {
+ return this.batchRequest.getSubRequests();
+ };
+ BlobBatch.prototype.addSubRequestInternal = function (subRequest, assembleSubRequestFunc) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, Mutex.lock(this.batch)];
case 1:
- _b.trys.push([1, 3, 4, 5]);
- return [4 /*yield*/, this.serviceContext.getUserDelegationKey({
- startsOn: truncatedISO8061Date(startsOn, false),
- expiresOn: truncatedISO8061Date(expiresOn, false)
- }, {
- abortSignal: options.abortSignal,
- spanOptions: spanOptions
- })];
+ _a.sent();
+ _a.label = 2;
case 2:
- response = _b.sent();
- userDelegationKey = {
- signedObjectId: response.signedObjectId,
- signedTenantId: response.signedTenantId,
- signedStartsOn: new Date(response.signedStartsOn),
- signedExpiresOn: new Date(response.signedExpiresOn),
- signedService: response.signedService,
- signedVersion: response.signedVersion,
- value: response.value
- };
- res = tslib.__assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey);
- return [2 /*return*/, res];
+ _a.trys.push([2, , 4, 6]);
+ this.batchRequest.preAddSubRequest(subRequest);
+ return [4 /*yield*/, assembleSubRequestFunc()];
case 3:
- e_11 = _b.sent();
- span.setStatus({
- code: api.CanonicalCode.UNKNOWN,
- message: e_11.message
- });
- throw e_11;
- case 4:
- span.end();
+ _a.sent();
+ this.batchRequest.postAddSubRequest(subRequest);
+ return [3 /*break*/, 6];
+ case 4: return [4 /*yield*/, Mutex.unlock(this.batch)];
+ case 5:
+ _a.sent();
return [7 /*endfinally*/];
- case 5: return [2 /*return*/];
+ case 6: return [2 /*return*/];
}
});
});
};
- /**
- * Creates a BlobBatchClient object to conduct batch operations.
- *
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch
- *
- * @returns {BlobBatchClient} A new BlobBatchClient object for this service.
- * @memberof BlobServiceClient
- */
- BlobServiceClient.prototype.getBlobBatchClient = function () {
- return new BlobBatchClient(this.url, this.pipeline);
+ BlobBatch.prototype.setBatchType = function (batchType) {
+ if (!this.batchType) {
+ this.batchType = batchType;
+ }
+ if (this.batchType !== batchType) {
+ throw new RangeError("BlobBatch only supports one operation type per batch and it already is being used for " + this.batchType + " operations.");
+ }
};
- return BlobServiceClient;
-}(StorageClient));
+ BlobBatch.prototype.deleteBlob = function (urlOrBlobClient, credentialOrOptions, options) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var url, credential, _a, span, spanOptions, e_1;
+ var _this = this;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ if (typeof urlOrBlobClient === "string" &&
+ ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) ||
+ credentialOrOptions instanceof AnonymousCredential ||
+ coreHttp.isTokenCredential(credentialOrOptions))) {
+ // First overload
+ url = urlOrBlobClient;
+ credential = credentialOrOptions;
+ }
+ else if (urlOrBlobClient instanceof BlobClient) {
+ // Second overload
+ url = urlOrBlobClient.url;
+ credential = urlOrBlobClient.credential;
+ options = credentialOrOptions;
+ }
+ else {
+ throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided.");
+ }
+ if (!options) {
+ options = {};
+ }
+ _a = createSpan("BatchDeleteRequest-addSubRequest", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 3, 4, 5]);
+ this.setBatchType("delete");
+ return [4 /*yield*/, this.addSubRequestInternal({
+ url: url,
+ credential: credential
+ }, function () { return tslib.__awaiter(_this, void 0, void 0, function () {
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 1:
+ _a.sent();
+ return [2 /*return*/];
+ }
+ });
+ }); })];
+ case 2:
+ _b.sent();
+ return [3 /*break*/, 5];
+ case 3:
+ e_1 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_1.message
+ });
+ throw e_1;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ BlobBatch.prototype.setBlobAccessTier = function (urlOrBlobClient, credentialOrTier, tierOrOptions, options) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var url, credential, tier, _a, span, spanOptions, e_2;
+ var _this = this;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ if (typeof urlOrBlobClient === "string" &&
+ ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) ||
+ credentialOrTier instanceof AnonymousCredential ||
+ coreHttp.isTokenCredential(credentialOrTier))) {
+ // First overload
+ url = urlOrBlobClient;
+ credential = credentialOrTier;
+ tier = tierOrOptions;
+ }
+ else if (urlOrBlobClient instanceof BlobClient) {
+ // Second overload
+ url = urlOrBlobClient.url;
+ credential = urlOrBlobClient.credential;
+ tier = credentialOrTier;
+ options = tierOrOptions;
+ }
+ else {
+ throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided.");
+ }
+ if (!options) {
+ options = {};
+ }
+ _a = createSpan("BatchSetTierRequest-addSubRequest", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 3, 4, 5]);
+ this.setBatchType("setAccessTier");
+ return [4 /*yield*/, this.addSubRequestInternal({
+ url: url,
+ credential: credential
+ }, function () { return tslib.__awaiter(_this, void 0, void 0, function () {
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 1:
+ _a.sent();
+ return [2 /*return*/];
+ }
+ });
+ }); })];
+ case 2:
+ _b.sent();
+ return [3 /*break*/, 5];
+ case 3:
+ e_2 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_2.message
+ });
+ throw e_2;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ return BlobBatch;
+}());
+/**
+ * Inner batch request class which is responsible for assembling and serializing sub requests.
+ * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled.
+ */
+var InnerBatchRequest = /** @class */ (function () {
+ function InnerBatchRequest() {
+ this.operationCount = 0;
+ this.body = "";
+ var tempGuid = coreHttp.generateUuid();
+ // batch_{batchid}
+ this.boundary = "batch_" + tempGuid;
+ // --batch_{batchid}
+ // Content-Type: application/http
+ // Content-Transfer-Encoding: binary
+ this.subRequestPrefix = "--" + this.boundary + HTTP_LINE_ENDING + HeaderConstants.CONTENT_TYPE + ": application/http" + HTTP_LINE_ENDING + HeaderConstants.CONTENT_TRANSFER_ENCODING + ": binary";
+ // multipart/mixed; boundary=batch_{batchid}
+ this.multipartContentType = "multipart/mixed; boundary=" + this.boundary;
+ // --batch_{batchid}--
+ this.batchRequestEnding = "--" + this.boundary + "--";
+ this.subRequests = new Map();
+ }
+ /**
+ * Create pipeline to assemble sub requests. The idea here is to use existing
+ * credential and serialization/deserialization components, with additional policies to
+ * filter unnecessary headers, assemble sub requests into request's body
+ * and intercept request from going to wire.
+ * @param {StorageSharedKeyCredential | AnonymousCredential | TokenCredential} credential Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the @azure/identity package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.
+ */
+ InnerBatchRequest.prototype.createPipeline = function (credential) {
+ var isAnonymousCreds = credential instanceof AnonymousCredential;
+ var policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory]
+ var factories = new Array(policyFactoryLength);
+ factories[0] = coreHttp.deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer
+ factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers
+ if (!isAnonymousCreds) {
+ factories[2] = coreHttp.isTokenCredential(credential)
+ ? coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes)
+ : credential;
+ }
+ factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire
+ return new Pipeline(factories, {});
+ };
+ InnerBatchRequest.prototype.appendSubRequestToBody = function (request) {
+ // Start to assemble sub request
+ this.body += [
+ this.subRequestPrefix,
+ HeaderConstants.CONTENT_ID + ": " + this.operationCount,
+ "",
+ request.method.toString() + " " + getURLPathAndQuery(request.url) + " " + HTTP_VERSION_1_1 + HTTP_LINE_ENDING // sub request start line with method
+ ].join(HTTP_LINE_ENDING);
+ for (var _i = 0, _a = request.headers.headersArray(); _i < _a.length; _i++) {
+ var header = _a[_i];
+ this.body += header.name + ": " + header.value + HTTP_LINE_ENDING;
+ }
+ this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line
+ // No body to assemble for current batch request support
+ // End to assemble sub request
+ };
+ InnerBatchRequest.prototype.preAddSubRequest = function (subRequest) {
+ if (this.operationCount >= BATCH_MAX_REQUEST) {
+ throw new RangeError("Cannot exceed " + BATCH_MAX_REQUEST + " sub requests in a single batch");
+ }
+ // Fast fail if url for sub request is invalid
+ var path = getURLPath(subRequest.url);
+ if (!path || path == "") {
+ throw new RangeError("Invalid url for sub request: '" + subRequest.url + "'");
+ }
+ };
+ InnerBatchRequest.prototype.postAddSubRequest = function (subRequest) {
+ this.subRequests.set(this.operationCount, subRequest);
+ this.operationCount++;
+ };
+ // Return the http request body with assembling the ending line to the sub request body.
+ InnerBatchRequest.prototype.getHttpRequestBody = function () {
+ return "" + this.body + this.batchRequestEnding + HTTP_LINE_ENDING;
+ };
+ InnerBatchRequest.prototype.getMultipartContentType = function () {
+ return this.multipartContentType;
+ };
+ InnerBatchRequest.prototype.getSubRequests = function () {
+ return this.subRequests;
+ };
+ return InnerBatchRequest;
+}());
+var BatchRequestAssemblePolicy = /** @class */ (function (_super) {
+ tslib.__extends(BatchRequestAssemblePolicy, _super);
+ function BatchRequestAssemblePolicy(batchRequest, nextPolicy, options) {
+ var _this = _super.call(this, nextPolicy, options) || this;
+ _this.dummyResponse = {
+ request: new coreHttp.WebResource(),
+ status: 200,
+ headers: new coreHttp.HttpHeaders()
+ };
+ _this.batchRequest = batchRequest;
+ return _this;
+ }
+ BatchRequestAssemblePolicy.prototype.sendRequest = function (request) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0: return [4 /*yield*/, this.batchRequest.appendSubRequestToBody(request)];
+ case 1:
+ _a.sent();
+ return [2 /*return*/, this.dummyResponse]; // Intercept request from going to wire
+ }
+ });
+ });
+ };
+ return BatchRequestAssemblePolicy;
+}(coreHttp.BaseRequestPolicy));
+var BatchRequestAssemblePolicyFactory = /** @class */ (function () {
+ function BatchRequestAssemblePolicyFactory(batchRequest) {
+ this.batchRequest = batchRequest;
+ }
+ BatchRequestAssemblePolicyFactory.prototype.create = function (nextPolicy, options) {
+ return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options);
+ };
+ return BatchRequestAssemblePolicyFactory;
+}());
+var BatchHeaderFilterPolicy = /** @class */ (function (_super) {
+ tslib.__extends(BatchHeaderFilterPolicy, _super);
+ function BatchHeaderFilterPolicy(nextPolicy, options) {
+ return _super.call(this, nextPolicy, options) || this;
+ }
+ BatchHeaderFilterPolicy.prototype.sendRequest = function (request) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var xMsHeaderName, _i, _a, header;
+ return tslib.__generator(this, function (_b) {
+ xMsHeaderName = "";
+ for (_i = 0, _a = request.headers.headersArray(); _i < _a.length; _i++) {
+ header = _a[_i];
+ if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) {
+ xMsHeaderName = header.name;
+ }
+ }
+ if (xMsHeaderName !== "") {
+ request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header.
+ }
+ return [2 /*return*/, this._nextPolicy.sendRequest(request)];
+ });
+ });
+ };
+ return BatchHeaderFilterPolicy;
+}(coreHttp.BaseRequestPolicy));
+var BatchHeaderFilterPolicyFactory = /** @class */ (function () {
+ function BatchHeaderFilterPolicyFactory() {
+ }
+ BatchHeaderFilterPolicyFactory.prototype.create = function (nextPolicy, options) {
+ return new BatchHeaderFilterPolicy(nextPolicy, options);
+ };
+ return BatchHeaderFilterPolicyFactory;
+}());
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch
+ */
+var BlobBatchClient = /** @class */ (function () {
+ function BlobBatchClient(url, credentialOrPipeline, options) {
+ var pipeline;
+ if (credentialOrPipeline instanceof Pipeline) {
+ pipeline = credentialOrPipeline;
+ }
+ else if (!credentialOrPipeline) {
+ // no credential provided
+ pipeline = newPipeline(new AnonymousCredential(), options);
+ }
+ else {
+ pipeline = newPipeline(credentialOrPipeline, options);
+ }
+ var storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions());
+ this._serviceContext = new Service(storageClientContext);
+ }
+ /**
+ * Creates a {@link BlobBatch}.
+ * A BlobBatch represents an aggregated set of operations on blobs.
+ */
+ BlobBatchClient.prototype.createBatch = function () {
+ return new BlobBatch();
+ };
+ BlobBatchClient.prototype.deleteBlobs = function (urlsOrBlobClients, credentialOrOptions, options) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var batch, _i, urlsOrBlobClients_1, urlOrBlobClient;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ batch = new BlobBatch();
+ _i = 0, urlsOrBlobClients_1 = urlsOrBlobClients;
+ _a.label = 1;
+ case 1:
+ if (!(_i < urlsOrBlobClients_1.length)) return [3 /*break*/, 6];
+ urlOrBlobClient = urlsOrBlobClients_1[_i];
+ if (!(typeof urlOrBlobClient === "string")) return [3 /*break*/, 3];
+ return [4 /*yield*/, batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options)];
+ case 2:
+ _a.sent();
+ return [3 /*break*/, 5];
+ case 3: return [4 /*yield*/, batch.deleteBlob(urlOrBlobClient, credentialOrOptions)];
+ case 4:
+ _a.sent();
+ _a.label = 5;
+ case 5:
+ _i++;
+ return [3 /*break*/, 1];
+ case 6: return [2 /*return*/, this.submitBatch(batch)];
+ }
+ });
+ });
+ };
+ BlobBatchClient.prototype.setBlobsAccessTier = function (urlsOrBlobClients, credentialOrTier, tierOrOptions, options) {
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var batch, _i, urlsOrBlobClients_2, urlOrBlobClient;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ batch = new BlobBatch();
+ _i = 0, urlsOrBlobClients_2 = urlsOrBlobClients;
+ _a.label = 1;
+ case 1:
+ if (!(_i < urlsOrBlobClients_2.length)) return [3 /*break*/, 6];
+ urlOrBlobClient = urlsOrBlobClients_2[_i];
+ if (!(typeof urlOrBlobClient === "string")) return [3 /*break*/, 3];
+ return [4 /*yield*/, batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options)];
+ case 2:
+ _a.sent();
+ return [3 /*break*/, 5];
+ case 3: return [4 /*yield*/, batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions)];
+ case 4:
+ _a.sent();
+ _a.label = 5;
+ case 5:
+ _i++;
+ return [3 /*break*/, 1];
+ case 6: return [2 /*return*/, this.submitBatch(batch)];
+ }
+ });
+ });
+ };
+ /**
+ * Submit batch request which consists of multiple subrequests.
+ *
+ * Get `blobBatchClient` and other details before running the snippets.
+ * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient`
+ *
+ * Example usage:
+ *
+ * ```js
+ * let batchRequest = new BlobBatch();
+ * await batchRequest.deleteBlob(urlInString0, credential0);
+ * await batchRequest.deleteBlob(urlInString1, credential1, {
+ * deleteSnapshots: "include"
+ * });
+ * const batchResp = await blobBatchClient.submitBatch(batchRequest);
+ * console.log(batchResp.subResponsesSucceededCount);
+ * ```
+ *
+ * Example using a lease:
+ *
+ * ```js
+ * let batchRequest = new BlobBatch();
+ * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool");
+ * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", {
+ * conditions: { leaseId: leaseId }
+ * });
+ * const batchResp = await blobBatchClient.submitBatch(batchRequest);
+ * console.log(batchResp.subResponsesSucceededCount);
+ * ```
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch
+ *
+ * @param {BlobBatch} batchRequest A set of Delete or SetTier operations.
+ * @param {BlobBatchSubmitBatchOptionalParams} [options]
+ * @returns {Promise}
+ * @memberof BlobBatchClient
+ */
+ BlobBatchClient.prototype.submitBatch = function (batchRequest, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _a, span, spanOptions, batchRequestBody, rawBatchResponse, batchResponseParser, responseSummary, res, e_1;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ if (!batchRequest || batchRequest.getSubRequests().size == 0) {
+ throw new RangeError("Batch request should contain one or more sub requests.");
+ }
+ _a = createSpan("BlobBatchClient-submitBatch", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 4, 5, 6]);
+ batchRequestBody = batchRequest.getHttpRequestBody();
+ return [4 /*yield*/, this._serviceContext.submitBatch(batchRequestBody, utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), tslib.__assign(tslib.__assign({}, options), { spanOptions: spanOptions }))];
+ case 2:
+ rawBatchResponse = _b.sent();
+ batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests());
+ return [4 /*yield*/, batchResponseParser.parseBatchResponse()];
+ case 3:
+ responseSummary = _b.sent();
+ res = {
+ _response: rawBatchResponse._response,
+ contentType: rawBatchResponse.contentType,
+ errorCode: rawBatchResponse.errorCode,
+ requestId: rawBatchResponse.requestId,
+ clientRequestId: rawBatchResponse.clientRequestId,
+ version: rawBatchResponse.version,
+ subResponses: responseSummary.subResponses,
+ subResponsesSucceededCount: responseSummary.subResponsesSucceededCount,
+ subResponsesFailedCount: responseSummary.subResponsesFailedCount
+ };
+ return [2 /*return*/, res];
+ case 4:
+ e_1 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_1.message
+ });
+ throw e_1;
+ case 5:
+ span.end();
+ return [7 /*endfinally*/];
+ case 6: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ return BlobBatchClient;
+}());
+
+/**
+ * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you
+ * to manipulate blob containers.
+ *
+ * @export
+ * @class BlobServiceClient
+ */
+var BlobServiceClient = /** @class */ (function (_super) {
+ tslib.__extends(BlobServiceClient, _super);
+ function BlobServiceClient(url, credentialOrPipeline, options) {
+ var _this = this;
+ var pipeline;
+ if (credentialOrPipeline instanceof Pipeline) {
+ pipeline = credentialOrPipeline;
+ }
+ else if ((coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) ||
+ credentialOrPipeline instanceof AnonymousCredential ||
+ coreHttp.isTokenCredential(credentialOrPipeline)) {
+ pipeline = newPipeline(credentialOrPipeline, options);
+ }
+ else {
+ // The second parameter is undefined. Use anonymous credential
+ pipeline = newPipeline(new AnonymousCredential(), options);
+ }
+ _this = _super.call(this, url, pipeline) || this;
+ _this.serviceContext = new Service(_this.storageClientContext);
+ return _this;
+ }
+ /**
+ *
+ * Creates an instance of BlobServiceClient from connection string.
+ *
+ * @param {string} connectionString Account connection string or a SAS connection string of an Azure storage account.
+ * [ Note - Account connection string can only be used in NODE.JS runtime. ]
+ * Account connection string example -
+ * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`
+ * SAS connection string example -
+ * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`
+ * @param {StoragePipelineOptions} [options] Optional. Options to configure the HTTP pipeline.
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.fromConnectionString = function (connectionString, options) {
+ options = options || {};
+ var extractedCreds = extractConnectionStringParts(connectionString);
+ if (extractedCreds.kind === "AccountConnString") {
+ {
+ var sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
+ options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);
+ var pipeline = newPipeline(sharedKeyCredential, options);
+ return new BlobServiceClient(extractedCreds.url, pipeline);
+ }
+ }
+ else if (extractedCreds.kind === "SASConnString") {
+ var pipeline = newPipeline(new AnonymousCredential(), options);
+ return new BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline);
+ }
+ else {
+ throw new Error("Connection string must be either an Account connection string or a SAS connection string");
+ }
+ };
+ /**
+ * Creates a {@link ContainerClient} object
+ *
+ * @param {string} containerName A container name
+ * @returns {ContainerClient} A new ContainerClient object for the given container name.
+ * @memberof BlobServiceClient
+ *
+ * Example usage:
+ *
+ * ```js
+ * const containerClient = blobServiceClient.getContainerClient("");
+ * ```
+ */
+ BlobServiceClient.prototype.getContainerClient = function (containerName) {
+ return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline);
+ };
+ /**
+ * Create a Blob container.
+ *
+ * @param {string} containerName Name of the container to create.
+ * @param {ContainerCreateOptions} [options] Options to configure Container Create operation.
+ * @returns {Promise<{ containerClient: ContainerClient; containerCreateResponse: ContainerCreateResponse }>} Container creation response and the corresponding container client.
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.createContainer = function (containerName, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _a, span, spanOptions, containerClient, containerCreateResponse, e_1;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ _a = createSpan("BlobServiceClient-createContainer", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 3, 4, 5]);
+ containerClient = this.getContainerClient(containerName);
+ return [4 /*yield*/, containerClient.create(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 2:
+ containerCreateResponse = _b.sent();
+ return [2 /*return*/, {
+ containerClient: containerClient,
+ containerCreateResponse: containerCreateResponse
+ }];
+ case 3:
+ e_1 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_1.message
+ });
+ throw e_1;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * Deletes a Blob container.
+ *
+ * @param {string} containerName Name of the container to delete.
+ * @param {ContainerDeleteMethodOptions} [options] Options to configure Container Delete operation.
+ * @returns {Promise} Container deletion response.
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.deleteContainer = function (containerName, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _a, span, spanOptions, containerClient, e_2;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ _a = createSpan("BlobServiceClient-deleteContainer", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 3, 4, 5]);
+ containerClient = this.getContainerClient(containerName);
+ return [4 /*yield*/, containerClient.delete(tslib.__assign(tslib.__assign({}, options), { tracingOptions: tslib.__assign(tslib.__assign({}, options.tracingOptions), { spanOptions: spanOptions }) }))];
+ case 2: return [2 /*return*/, _b.sent()];
+ case 3:
+ e_2 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_2.message
+ });
+ throw e_2;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * Gets the properties of a storage account’s Blob service, including properties
+ * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties
+ *
+ * @param {ServiceGetPropertiesOptions} [options] Options to the Service Get Properties operation.
+ * @returns {Promise} Response data for the Service Get Properties operation.
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.getProperties = function (options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _a, span, spanOptions, e_3;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ _a = createSpan("BlobServiceClient-getProperties", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.serviceContext.getProperties({
+ abortSignal: options.abortSignal,
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _b.sent()];
+ case 3:
+ e_3 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_3.message
+ });
+ throw e_3;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * Sets properties for a storage account’s Blob service endpoint, including properties
+ * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties}
+ *
+ * @param {BlobServiceProperties} properties
+ * @param {ServiceSetPropertiesOptions} [options] Options to the Service Set Properties operation.
+ * @returns {Promise} Response data for the Service Set Properties operation.
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.setProperties = function (properties, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _a, span, spanOptions, e_4;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ _a = createSpan("BlobServiceClient-setProperties", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.serviceContext.setProperties(properties, {
+ abortSignal: options.abortSignal,
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _b.sent()];
+ case 3:
+ e_4 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_4.message
+ });
+ throw e_4;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * Retrieves statistics related to replication for the Blob service. It is only
+ * available on the secondary location endpoint when read-access geo-redundant
+ * replication is enabled for the storage account.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats}
+ *
+ * @param {ServiceGetStatisticsOptions} [options] Options to the Service Get Statistics operation.
+ * @returns {Promise} Response data for the Service Get Statistics operation.
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.getStatistics = function (options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _a, span, spanOptions, e_5;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ _a = createSpan("BlobServiceClient-getStatistics", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.serviceContext.getStatistics({
+ abortSignal: options.abortSignal,
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _b.sent()];
+ case 3:
+ e_5 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_5.message
+ });
+ throw e_5;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * The Get Account Information operation returns the sku name and account kind
+ * for the specified account.
+ * The Get Account Information operation is available on service versions beginning
+ * with version 2018-03-28.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information
+ *
+ * @param {ServiceGetAccountInfoOptions} [options] Options to the Service Get Account Info operation.
+ * @returns {Promise} Response data for the Service Get Account Info operation.
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.getAccountInfo = function (options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _a, span, spanOptions, e_6;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ _a = createSpan("BlobServiceClient-getAccountInfo", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.serviceContext.getAccountInfo({
+ abortSignal: options.abortSignal,
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _b.sent()];
+ case 3:
+ e_6 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_6.message
+ });
+ throw e_6;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * Returns a list of the containers under the specified account.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2
+ *
+ * @param {string} [marker] A string value that identifies the portion of
+ * the list of containers to be returned with the next listing operation. The
+ * operation returns the NextMarker value within the response body if the
+ * listing operation did not return all containers remaining to be listed
+ * with the current page. The NextMarker value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of list
+ * items. The marker value is opaque to the client.
+ * @param {ServiceListContainersSegmentOptions} [options] Options to the Service List Container Segment operation.
+ * @returns {Promise} Response data for the Service List Container Segment operation.
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.listContainersSegment = function (marker, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _a, span, spanOptions, e_7;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ _a = createSpan("BlobServiceClient-listContainersSegment", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.serviceContext.listContainersSegment(tslib.__assign(tslib.__assign({ abortSignal: options.abortSignal, marker: marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include, spanOptions: spanOptions }))];
+ case 2: return [2 /*return*/, _b.sent()];
+ case 3:
+ e_7 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_7.message
+ });
+ throw e_7;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * The Filter Blobs operation enables callers to list blobs across all containers whose tags
+ * match a given search expression. Filter blobs searches across all containers within a
+ * storage account but can be scoped within the expression to a single container.
+ *
+ * @private
+ * @param {string} tagFilterSqlExpression The where parameter enables the caller to query blobs whose tags match a given expression.
+ * The given expression must evaluate to true for a blob to be returned in the results.
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param {string} [marker] A string value that identifies the portion of
+ * the list of blobs to be returned with the next listing operation. The
+ * operation returns the NextMarker value within the response body if the
+ * listing operation did not return all blobs remaining to be listed
+ * with the current page. The NextMarker value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of list
+ * items. The marker value is opaque to the client.
+ * @param {ServiceFindBlobsByTagsSegmentOptions} [options={}] Options to find blobs by tags.
+ * @returns {Promise}
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.findBlobsByTagsSegment = function (tagFilterSqlExpression, marker, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _a, span, spanOptions, e_8;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ _a = createSpan("BlobServiceClient-findBlobsByTagsSegment", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.serviceContext.filterBlobs({
+ abortSignal: options.abortSignal,
+ where: tagFilterSqlExpression,
+ marker: marker,
+ maxPageSize: options.maxPageSize,
+ spanOptions: spanOptions
+ })];
+ case 2: return [2 /*return*/, _b.sent()];
+ case 3:
+ e_8 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_8.message
+ });
+ throw e_8;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse.
+ *
+ * @private
+ * @param {string} tagFilterSqlExpression The where parameter enables the caller to query blobs whose tags match a given expression.
+ * The given expression must evaluate to true for a blob to be returned in the results.
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param {string} [marker] A string value that identifies the portion of
+ * the list of blobs to be returned with the next listing operation. The
+ * operation returns the NextMarker value within the response body if the
+ * listing operation did not return all blobs remaining to be listed
+ * with the current page. The NextMarker value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of list
+ * items. The marker value is opaque to the client.
+ * @param {ServiceFindBlobsByTagsSegmentOptions} [options={}] Options to find blobs by tags.
+ * @returns {AsyncIterableIterator}
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.findBlobsByTagsSegments = function (tagFilterSqlExpression, marker, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__asyncGenerator(this, arguments, function findBlobsByTagsSegments_1() {
+ var response;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ if (!(!!marker || marker === undefined)) return [3 /*break*/, 6];
+ _a.label = 1;
+ case 1: return [4 /*yield*/, tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options))];
+ case 2:
+ response = _a.sent();
+ response.blobs = response.blobs || [];
+ marker = response.continuationToken;
+ return [4 /*yield*/, tslib.__await(response)];
+ case 3: return [4 /*yield*/, _a.sent()];
+ case 4:
+ _a.sent();
+ _a.label = 5;
+ case 5:
+ if (marker) return [3 /*break*/, 1];
+ _a.label = 6;
+ case 6: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * Returns an AsyncIterableIterator for blobs.
+ *
+ * @private
+ * @param {string} tagFilterSqlExpression The where parameter enables the caller to query blobs whose tags match a given expression.
+ * The given expression must evaluate to true for a blob to be returned in the results.
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param {ServiceFindBlobsByTagsSegmentOptions} [options={}] Options to findBlobsByTagsItems.
+ * @returns {AsyncIterableIterator}
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.findBlobsByTagsItems = function (tagFilterSqlExpression, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__asyncGenerator(this, arguments, function findBlobsByTagsItems_1() {
+ var marker, _a, _b, segment, e_9_1;
+ var e_9, _c;
+ return tslib.__generator(this, function (_d) {
+ switch (_d.label) {
+ case 0:
+ _d.trys.push([0, 7, 8, 13]);
+ _a = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options));
+ _d.label = 1;
+ case 1: return [4 /*yield*/, tslib.__await(_a.next())];
+ case 2:
+ if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6];
+ segment = _b.value;
+ return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs)))];
+ case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])];
+ case 4:
+ _d.sent();
+ _d.label = 5;
+ case 5: return [3 /*break*/, 1];
+ case 6: return [3 /*break*/, 13];
+ case 7:
+ e_9_1 = _d.sent();
+ e_9 = { error: e_9_1 };
+ return [3 /*break*/, 13];
+ case 8:
+ _d.trys.push([8, , 11, 12]);
+ if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10];
+ return [4 /*yield*/, tslib.__await(_c.call(_a))];
+ case 9:
+ _d.sent();
+ _d.label = 10;
+ case 10: return [3 /*break*/, 12];
+ case 11:
+ if (e_9) throw e_9.error;
+ return [7 /*endfinally*/];
+ case 12: return [7 /*endfinally*/];
+ case 13: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * Returns an async iterable iterator to find all blobs with specified tag
+ * under the specified account.
+ *
+ * .byPage() returns an async iterable iterator to list the blobs in pages.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * let i = 1;
+ * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) {
+ * console.log(`Blob ${i++}: ${container.name}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'");
+ * let blobItem = await iter.next();
+ * while (!blobItem.done) {
+ * console.log(`Blob ${i++}: ${blobItem.value.name}`);
+ * blobItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) {
+ * if (response.blobs) {
+ * for (const blob of response.blobs) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 blob names
+ * if (response.blobs) {
+ * for (const blob of response.blobs) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ * // Passing next marker as continuationToken
+ * iterator = blobServiceClient
+ * .findBlobsByTags("tagkey='tagvalue'")
+ * .byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints blob names
+ * if (response.blobs) {
+ * for (const blob of response.blobs) {
+ * console.log(`Blob ${i++}: ${blob.name}`);
+ * }
+ * }
+ * ```
+ *
+ * @param {string} tagFilterSqlExpression The where parameter enables the caller to query blobs whose tags match a given expression.
+ * The given expression must evaluate to true for a blob to be returned in the results.
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param {ServiceFindBlobByTagsOptions} [options={}] Options to find blobs by tags.
+ * @returns {PagedAsyncIterableIterator}
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.findBlobsByTags = function (tagFilterSqlExpression, options) {
+ var _a;
+ var _this = this;
+ if (options === void 0) { options = {}; }
+ // AsyncIterableIterator to iterate over blobs
+ var listSegmentOptions = tslib.__assign({}, options);
+ var iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);
+ return _a = {
+ /**
+ * @member {Promise} [next] The next method, part of the iteration protocol
+ */
+ next: function () {
+ return iter.next();
+ }
+ },
+ /**
+ * @member {Symbol} [asyncIterator] The connection to the async iterator, part of the iteration protocol
+ */
+ _a[Symbol.asyncIterator] = function () {
+ return this;
+ },
+ /**
+ * @member {Function} [byPage] Return an AsyncIterableIterator that works a page at a time
+ */
+ _a.byPage = function (settings) {
+ if (settings === void 0) { settings = {}; }
+ return _this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions));
+ },
+ _a;
+ };
+ /**
+ * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses
+ *
+ * @private
+ * @param {string} [marker] A string value that identifies the portion of
+ * the list of containers to be returned with the next listing operation. The
+ * operation returns the NextMarker value within the response body if the
+ * listing operation did not return all containers remaining to be listed
+ * with the current page. The NextMarker value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of list
+ * items. The marker value is opaque to the client.
+ * @param {ServiceListContainersSegmentOptions} [options] Options to list containers operation.
+ * @returns {AsyncIterableIterator}
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.listSegments = function (marker, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__asyncGenerator(this, arguments, function listSegments_1() {
+ var listContainersSegmentResponse;
+ return tslib.__generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ if (!(!!marker || marker === undefined)) return [3 /*break*/, 7];
+ _a.label = 1;
+ case 1: return [4 /*yield*/, tslib.__await(this.listContainersSegment(marker, options))];
+ case 2:
+ listContainersSegmentResponse = _a.sent();
+ listContainersSegmentResponse.containerItems =
+ listContainersSegmentResponse.containerItems || [];
+ marker = listContainersSegmentResponse.continuationToken;
+ return [4 /*yield*/, tslib.__await(listContainersSegmentResponse)];
+ case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_a.sent()])];
+ case 4: return [4 /*yield*/, _a.sent()];
+ case 5:
+ _a.sent();
+ _a.label = 6;
+ case 6:
+ if (marker) return [3 /*break*/, 1];
+ _a.label = 7;
+ case 7: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * Returns an AsyncIterableIterator for Container Items
+ *
+ * @private
+ * @param {ServiceListContainersSegmentOptions} [options] Options to list containers operation.
+ * @returns {AsyncIterableIterator}
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.listItems = function (options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__asyncGenerator(this, arguments, function listItems_1() {
+ var marker, _a, _b, segment, e_10_1;
+ var e_10, _c;
+ return tslib.__generator(this, function (_d) {
+ switch (_d.label) {
+ case 0:
+ _d.trys.push([0, 7, 8, 13]);
+ _a = tslib.__asyncValues(this.listSegments(marker, options));
+ _d.label = 1;
+ case 1: return [4 /*yield*/, tslib.__await(_a.next())];
+ case 2:
+ if (!(_b = _d.sent(), !_b.done)) return [3 /*break*/, 6];
+ segment = _b.value;
+ return [5 /*yield**/, tslib.__values(tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems)))];
+ case 3: return [4 /*yield*/, tslib.__await.apply(void 0, [_d.sent()])];
+ case 4:
+ _d.sent();
+ _d.label = 5;
+ case 5: return [3 /*break*/, 1];
+ case 6: return [3 /*break*/, 13];
+ case 7:
+ e_10_1 = _d.sent();
+ e_10 = { error: e_10_1 };
+ return [3 /*break*/, 13];
+ case 8:
+ _d.trys.push([8, , 11, 12]);
+ if (!(_b && !_b.done && (_c = _a.return))) return [3 /*break*/, 10];
+ return [4 /*yield*/, tslib.__await(_c.call(_a))];
+ case 9:
+ _d.sent();
+ _d.label = 10;
+ case 10: return [3 /*break*/, 12];
+ case 11:
+ if (e_10) throw e_10.error;
+ return [7 /*endfinally*/];
+ case 12: return [7 /*endfinally*/];
+ case 13: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * Returns an async iterable iterator to list all the containers
+ * under the specified account.
+ *
+ * .byPage() returns an async iterable iterator to list the containers in pages.
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * let i = 1;
+ * for await (const container of blobServiceClient.listContainers()) {
+ * console.log(`Container ${i++}: ${container.name}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * const iter = blobServiceClient.listContainers();
+ * let containerItem = await iter.next();
+ * while (!containerItem.done) {
+ * console.log(`Container ${i++}: ${containerItem.value.name}`);
+ * containerItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) {
+ * if (response.containerItems) {
+ * for (const container of response.containerItems) {
+ * console.log(`Container ${i++}: ${container.name}`);
+ * }
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 container names
+ * if (response.containerItems) {
+ * for (const container of response.containerItems) {
+ * console.log(`Container ${i++}: ${container.name}`);
+ * }
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ * // Passing next marker as continuationToken
+ * iterator = blobServiceClient
+ * .listContainers()
+ * .byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints 10 container names
+ * if (response.containerItems) {
+ * for (const container of response.containerItems) {
+ * console.log(`Container ${i++}: ${container.name}`);
+ * }
+ * }
+ * ```
+ *
+ * @param {ServiceListContainersOptions} [options={}] Options to list containers.
+ * @returns {PagedAsyncIterableIterator} An asyncIterableIterator that supports paging.
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.listContainers = function (options) {
+ var _a;
+ var _this = this;
+ if (options === void 0) { options = {}; }
+ if (options.prefix === "") {
+ options.prefix = undefined;
+ }
+ // AsyncIterableIterator to iterate over containers
+ var listSegmentOptions = tslib.__assign(tslib.__assign({}, options), (options.includeMetadata ? { include: "metadata" } : {}));
+ var iter = this.listItems(listSegmentOptions);
+ return _a = {
+ /**
+ * @member {Promise} [next] The next method, part of the iteration protocol
+ */
+ next: function () {
+ return iter.next();
+ }
+ },
+ /**
+ * @member {Symbol} [asyncIterator] The connection to the async iterator, part of the iteration protocol
+ */
+ _a[Symbol.asyncIterator] = function () {
+ return this;
+ },
+ /**
+ * @member {Function} [byPage] Return an AsyncIterableIterator that works a page at a time
+ */
+ _a.byPage = function (settings) {
+ if (settings === void 0) { settings = {}; }
+ return _this.listSegments(settings.continuationToken, tslib.__assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions));
+ },
+ _a;
+ };
+ /**
+ * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential).
+ *
+ * Retrieves a user delegation key for the Blob service. This is only a valid operation when using
+ * bearer token authentication.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key
+ *
+ * @param {Date} startsOn The start time for the user delegation SAS. Must be within 7 days of the current time
+ * @param {Date} expiresOn The end time for the user delegation SAS. Must be within 7 days of the current time
+ * @returns {Promise}
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.getUserDelegationKey = function (startsOn, expiresOn, options) {
+ if (options === void 0) { options = {}; }
+ return tslib.__awaiter(this, void 0, void 0, function () {
+ var _a, span, spanOptions, response, userDelegationKey, res, e_11;
+ return tslib.__generator(this, function (_b) {
+ switch (_b.label) {
+ case 0:
+ _a = createSpan("BlobServiceClient-getUserDelegationKey", options.tracingOptions), span = _a.span, spanOptions = _a.spanOptions;
+ _b.label = 1;
+ case 1:
+ _b.trys.push([1, 3, 4, 5]);
+ return [4 /*yield*/, this.serviceContext.getUserDelegationKey({
+ startsOn: truncatedISO8061Date(startsOn, false),
+ expiresOn: truncatedISO8061Date(expiresOn, false)
+ }, {
+ abortSignal: options.abortSignal,
+ spanOptions: spanOptions
+ })];
+ case 2:
+ response = _b.sent();
+ userDelegationKey = {
+ signedObjectId: response.signedObjectId,
+ signedTenantId: response.signedTenantId,
+ signedStartsOn: new Date(response.signedStartsOn),
+ signedExpiresOn: new Date(response.signedExpiresOn),
+ signedService: response.signedService,
+ signedVersion: response.signedVersion,
+ value: response.value
+ };
+ res = tslib.__assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey);
+ return [2 /*return*/, res];
+ case 3:
+ e_11 = _b.sent();
+ span.setStatus({
+ code: api.CanonicalCode.UNKNOWN,
+ message: e_11.message
+ });
+ throw e_11;
+ case 4:
+ span.end();
+ return [7 /*endfinally*/];
+ case 5: return [2 /*return*/];
+ }
+ });
+ });
+ };
+ /**
+ * Creates a BlobBatchClient object to conduct batch operations.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch
+ *
+ * @returns {BlobBatchClient} A new BlobBatchClient object for this service.
+ * @memberof BlobServiceClient
+ */
+ BlobServiceClient.prototype.getBlobBatchClient = function () {
+ return new BlobBatchClient(this.url, this.pipeline);
+ };
+ return BlobServiceClient;
+}(StorageClient));
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value
+ * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the
+ * values are set, this should be serialized with toString and set as the permissions field on an
+ * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but
+ * the order of the permissions is particular and this class guarantees correctness.
+ *
+ * @export
+ * @class AccountSASPermissions
+ */
+var AccountSASPermissions = /** @class */ (function () {
+ function AccountSASPermissions() {
+ /**
+ * Permission to read resources and list queues and tables granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASPermissions
+ */
+ this.read = false;
+ /**
+ * Permission to write resources granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASPermissions
+ */
+ this.write = false;
+ /**
+ * Permission to create blobs and files granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASPermissions
+ */
+ this.delete = false;
+ /**
+ * Permission to delete versions granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASPermissions
+ */
+ this.deleteVersion = false;
+ /**
+ * Permission to list blob containers, blobs, shares, directories, and files granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASPermissions
+ */
+ this.list = false;
+ /**
+ * Permission to add messages, table entities, and append to blobs granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASPermissions
+ */
+ this.add = false;
+ /**
+ * Permission to create blobs and files granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASPermissions
+ */
+ this.create = false;
+ /**
+ * Permissions to update messages and table entities granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASPermissions
+ */
+ this.update = false;
+ /**
+ * Permission to get and delete messages granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASPermissions
+ */
+ this.process = false;
+ /**
+ * Specfies Tag access granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASPermissions
+ */
+ this.tag = false;
+ /**
+ * Permission to filter blobs.
+ *
+ * @type {boolean}
+ * @memberof AccountSASPermissions
+ */
+ this.filter = false;
+ }
+ /**
+ * Parse initializes the AccountSASPermissions fields from a string.
+ *
+ * @static
+ * @param {string} permissions
+ * @returns {AccountSASPermissions}
+ * @memberof AccountSASPermissions
+ */
+ AccountSASPermissions.parse = function (permissions) {
+ var accountSASPermissions = new AccountSASPermissions();
+ for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) {
+ var c = permissions_1[_i];
+ switch (c) {
+ case "r":
+ accountSASPermissions.read = true;
+ break;
+ case "w":
+ accountSASPermissions.write = true;
+ break;
+ case "d":
+ accountSASPermissions.delete = true;
+ break;
+ case "x":
+ accountSASPermissions.deleteVersion = true;
+ break;
+ case "l":
+ accountSASPermissions.list = true;
+ break;
+ case "a":
+ accountSASPermissions.add = true;
+ break;
+ case "c":
+ accountSASPermissions.create = true;
+ break;
+ case "u":
+ accountSASPermissions.update = true;
+ break;
+ case "p":
+ accountSASPermissions.process = true;
+ break;
+ case "t":
+ accountSASPermissions.tag = true;
+ break;
+ case "f":
+ accountSASPermissions.filter = true;
+ break;
+ default:
+ throw new RangeError("Invalid permission character: " + c);
+ }
+ }
+ return accountSASPermissions;
+ };
+ /**
+ * Produces the SAS permissions string for an Azure Storage account.
+ * Call this method to set AccountSASSignatureValues Permissions field.
+ *
+ * Using this method will guarantee the resource types are in
+ * an order accepted by the service.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
+ *
+ * @returns {string}
+ * @memberof AccountSASPermissions
+ */
+ AccountSASPermissions.prototype.toString = function () {
+ // The order of the characters should be as specified here to ensure correctness:
+ // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
+ // Use a string array instead of string concatenating += operator for performance
+ var permissions = [];
+ if (this.read) {
+ permissions.push("r");
+ }
+ if (this.write) {
+ permissions.push("w");
+ }
+ if (this.delete) {
+ permissions.push("d");
+ }
+ if (this.deleteVersion) {
+ permissions.push("x");
+ }
+ if (this.filter) {
+ permissions.push("f");
+ }
+ if (this.tag) {
+ permissions.push("t");
+ }
+ if (this.list) {
+ permissions.push("l");
+ }
+ if (this.add) {
+ permissions.push("a");
+ }
+ if (this.create) {
+ permissions.push("c");
+ }
+ if (this.update) {
+ permissions.push("u");
+ }
+ if (this.process) {
+ permissions.push("p");
+ }
+ return permissions.join("");
+ };
+ return AccountSASPermissions;
+}());
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value
+ * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the
+ * values are set, this should be serialized with toString and set as the resources field on an
+ * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but
+ * the order of the resources is particular and this class guarantees correctness.
+ *
+ * @export
+ * @class AccountSASResourceTypes
+ */
+var AccountSASResourceTypes = /** @class */ (function () {
+ function AccountSASResourceTypes() {
+ /**
+ * Permission to access service level APIs granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASResourceTypes
+ */
+ this.service = false;
+ /**
+ * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASResourceTypes
+ */
+ this.container = false;
+ /**
+ * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASResourceTypes
+ */
+ this.object = false;
+ }
+ /**
+ * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an
+ * Error if it encounters a character that does not correspond to a valid resource type.
+ *
+ * @static
+ * @param {string} resourceTypes
+ * @returns {AccountSASResourceTypes}
+ * @memberof AccountSASResourceTypes
+ */
+ AccountSASResourceTypes.parse = function (resourceTypes) {
+ var accountSASResourceTypes = new AccountSASResourceTypes();
+ for (var _i = 0, resourceTypes_1 = resourceTypes; _i < resourceTypes_1.length; _i++) {
+ var c = resourceTypes_1[_i];
+ switch (c) {
+ case "s":
+ accountSASResourceTypes.service = true;
+ break;
+ case "c":
+ accountSASResourceTypes.container = true;
+ break;
+ case "o":
+ accountSASResourceTypes.object = true;
+ break;
+ default:
+ throw new RangeError("Invalid resource type: " + c);
+ }
+ }
+ return accountSASResourceTypes;
+ };
+ /**
+ * Converts the given resource types to a string.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
+ *
+ * @returns {string}
+ * @memberof AccountSASResourceTypes
+ */
+ AccountSASResourceTypes.prototype.toString = function () {
+ var resourceTypes = [];
+ if (this.service) {
+ resourceTypes.push("s");
+ }
+ if (this.container) {
+ resourceTypes.push("c");
+ }
+ if (this.object) {
+ resourceTypes.push("o");
+ }
+ return resourceTypes.join("");
+ };
+ return AccountSASResourceTypes;
+}());
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value
+ * to true means that any SAS which uses these permissions will grant access to that service. Once all the
+ * values are set, this should be serialized with toString and set as the services field on an
+ * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but
+ * the order of the services is particular and this class guarantees correctness.
+ *
+ * @export
+ * @class AccountSASServices
+ */
+var AccountSASServices = /** @class */ (function () {
+ function AccountSASServices() {
+ /**
+ * Permission to access blob resources granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASServices
+ */
+ this.blob = false;
+ /**
+ * Permission to access file resources granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASServices
+ */
+ this.file = false;
+ /**
+ * Permission to access queue resources granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASServices
+ */
+ this.queue = false;
+ /**
+ * Permission to access table resources granted.
+ *
+ * @type {boolean}
+ * @memberof AccountSASServices
+ */
+ this.table = false;
+ }
+ /**
+ * Creates an {@link AccountSASServices} from the specified services string. This method will throw an
+ * Error if it encounters a character that does not correspond to a valid service.
+ *
+ * @static
+ * @param {string} services
+ * @returns {AccountSASServices}
+ * @memberof AccountSASServices
+ */
+ AccountSASServices.parse = function (services) {
+ var accountSASServices = new AccountSASServices();
+ for (var _i = 0, services_1 = services; _i < services_1.length; _i++) {
+ var c = services_1[_i];
+ switch (c) {
+ case "b":
+ accountSASServices.blob = true;
+ break;
+ case "f":
+ accountSASServices.file = true;
+ break;
+ case "q":
+ accountSASServices.queue = true;
+ break;
+ case "t":
+ accountSASServices.table = true;
+ break;
+ default:
+ throw new RangeError("Invalid service character: " + c);
+ }
+ }
+ return accountSASServices;
+ };
+ /**
+ * Converts the given services to a string.
+ *
+ * @returns {string}
+ * @memberof AccountSASServices
+ */
+ AccountSASServices.prototype.toString = function () {
+ var services = [];
+ if (this.blob) {
+ services.push("b");
+ }
+ if (this.table) {
+ services.push("t");
+ }
+ if (this.queue) {
+ services.push("q");
+ }
+ if (this.file) {
+ services.push("f");
+ }
+ return services.join("");
+ };
+ return AccountSASServices;
+}());
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+/**
+ * Generate SasIPRange format string. For example:
+ *
+ * "8.8.8.8" or "1.1.1.1-255.255.255.255"
+ *
+ * @export
+ * @param {SasIPRange} ipRange
+ * @returns {string}
+ */
+function ipRangeToString(ipRange) {
+ return ipRange.end ? ipRange.start + "-" + ipRange.end : ipRange.start;
+}
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+(function (SASProtocol) {
+ /**
+ * Protocol that allows HTTPS only
+ */
+ SASProtocol["Https"] = "https";
+ /**
+ * Protocol that allows both HTTPS and HTTP
+ */
+ SASProtocol["HttpsAndHttp"] = "https,http";
+})(exports.SASProtocol || (exports.SASProtocol = {}));
+/**
+ * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly
+ * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues}
+ * types. Once generated, it can be encoded into a {@code String} and appended to a URL directly (though caution should
+ * be taken here in case there are existing query parameters, which might affect the appropriate means of appending
+ * these query parameters).
+ *
+ * NOTE: Instances of this class are immutable.
+ *
+ * @export
+ * @class SASQueryParameters
+ */
+var SASQueryParameters = /** @class */ (function () {
+ /**
+ * Creates an instance of SASQueryParameters.
+ *
+ * @param {string} version Representing the storage version
+ * @param {string} signature Representing the signature for the SAS token
+ * @param {string} [permissions] Representing the storage permissions
+ * @param {string} [services] Representing the storage services being accessed (only for Account SAS)
+ * @param {string} [resourceTypes] Representing the storage resource types being accessed (only for Account SAS)
+ * @param {SASProtocol} [protocol] Representing the allowed HTTP protocol(s)
+ * @param {Date} [startsOn] Representing the start time for this SAS token
+ * @param {Date} [expiresOn] Representing the expiry time for this SAS token
+ * @param {SasIPRange} [ipRange] Representing the range of valid IP addresses for this SAS token
+ * @param {string} [identifier] Representing the signed identifier (only for Service SAS)
+ * @param {string} [resource] Representing the storage container or blob (only for Service SAS)
+ * @param {string} [cacheControl] Representing the cache-control header (only for Blob/File Service SAS)
+ * @param {string} [contentDisposition] Representing the content-disposition header (only for Blob/File Service SAS)
+ * @param {string} [contentEncoding] Representing the content-encoding header (only for Blob/File Service SAS)
+ * @param {string} [contentLanguage] Representing the content-language header (only for Blob/File Service SAS)
+ * @param {string} [contentType] Representing the content-type header (only for Blob/File Service SAS)
+ * @param {userDelegationKey} [userDelegationKey] Representing the user delegation key properties
+ * @memberof SASQueryParameters
+ */
+ function SASQueryParameters(version, signature, permissions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey) {
+ this.version = version;
+ this.services = services;
+ this.resourceTypes = resourceTypes;
+ this.expiresOn = expiresOn;
+ this.permissions = permissions;
+ this.protocol = protocol;
+ this.startsOn = startsOn;
+ this.ipRangeInner = ipRange;
+ this.identifier = identifier;
+ this.resource = resource;
+ this.signature = signature;
+ this.cacheControl = cacheControl;
+ this.contentDisposition = contentDisposition;
+ this.contentEncoding = contentEncoding;
+ this.contentLanguage = contentLanguage;
+ this.contentType = contentType;
+ if (userDelegationKey) {
+ this.signedOid = userDelegationKey.signedObjectId;
+ this.signedTenantId = userDelegationKey.signedTenantId;
+ this.signedStartsOn = userDelegationKey.signedStartsOn;
+ this.signedExpiresOn = userDelegationKey.signedExpiresOn;
+ this.signedService = userDelegationKey.signedService;
+ this.signedVersion = userDelegationKey.signedVersion;
+ }
+ }
+ Object.defineProperty(SASQueryParameters.prototype, "ipRange", {
+ /**
+ * Optional. IP range allowed for this SAS.
+ *
+ * @readonly
+ * @type {(SasIPRange | undefined)}
+ * @memberof SASQueryParameters
+ */
+ get: function () {
+ if (this.ipRangeInner) {
+ return {
+ end: this.ipRangeInner.end,
+ start: this.ipRangeInner.start
+ };
+ }
+ return undefined;
+ },
+ enumerable: false,
+ configurable: true
+ });
+ /**
+ * Encodes all SAS query parameters into a string that can be appended to a URL.
+ *
+ * @returns {string}
+ * @memberof SASQueryParameters
+ */
+ SASQueryParameters.prototype.toString = function () {
+ var params = [
+ "sv",
+ "ss",
+ "srt",
+ "spr",
+ "st",
+ "se",
+ "sip",
+ "si",
+ "skoid",
+ "sktid",
+ "skt",
+ "ske",
+ "sks",
+ "skv",
+ "sr",
+ "sp",
+ "sig",
+ "rscc",
+ "rscd",
+ "rsce",
+ "rscl",
+ "rsct"
+ ];
+ var queries = [];
+ for (var _i = 0, params_1 = params; _i < params_1.length; _i++) {
+ var param = params_1[_i];
+ switch (param) {
+ case "sv":
+ this.tryAppendQueryParameter(queries, param, this.version);
+ break;
+ case "ss":
+ this.tryAppendQueryParameter(queries, param, this.services);
+ break;
+ case "srt":
+ this.tryAppendQueryParameter(queries, param, this.resourceTypes);
+ break;
+ case "spr":
+ this.tryAppendQueryParameter(queries, param, this.protocol);
+ break;
+ case "st":
+ this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined);
+ break;
+ case "se":
+ this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined);
+ break;
+ case "sip":
+ this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined);
+ break;
+ case "si":
+ this.tryAppendQueryParameter(queries, param, this.identifier);
+ break;
+ case "skoid": // Signed object ID
+ this.tryAppendQueryParameter(queries, param, this.signedOid);
+ break;
+ case "sktid": // Signed tenant ID
+ this.tryAppendQueryParameter(queries, param, this.signedTenantId);
+ break;
+ case "skt": // Signed key start time
+ this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined);
+ break;
+ case "ske": // Signed key expiry time
+ this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined);
+ break;
+ case "sks": // Signed key service
+ this.tryAppendQueryParameter(queries, param, this.signedService);
+ break;
+ case "skv": // Signed key version
+ this.tryAppendQueryParameter(queries, param, this.signedVersion);
+ break;
+ case "sr":
+ this.tryAppendQueryParameter(queries, param, this.resource);
+ break;
+ case "sp":
+ this.tryAppendQueryParameter(queries, param, this.permissions);
+ break;
+ case "sig":
+ this.tryAppendQueryParameter(queries, param, this.signature);
+ break;
+ case "rscc":
+ this.tryAppendQueryParameter(queries, param, this.cacheControl);
+ break;
+ case "rscd":
+ this.tryAppendQueryParameter(queries, param, this.contentDisposition);
+ break;
+ case "rsce":
+ this.tryAppendQueryParameter(queries, param, this.contentEncoding);
+ break;
+ case "rscl":
+ this.tryAppendQueryParameter(queries, param, this.contentLanguage);
+ break;
+ case "rsct":
+ this.tryAppendQueryParameter(queries, param, this.contentType);
+ break;
+ }
+ }
+ return queries.join("&");
+ };
+ /**
+ * A private helper method used to filter and append query key/value pairs into an array.
+ *
+ * @private
+ * @param {string[]} queries
+ * @param {string} key
+ * @param {string} [value]
+ * @returns {void}
+ * @memberof SASQueryParameters
+ */
+ SASQueryParameters.prototype.tryAppendQueryParameter = function (queries, key, value) {
+ if (!value) {
+ return;
+ }
+ key = encodeURIComponent(key);
+ value = encodeURIComponent(value);
+ if (key.length > 0 && value.length > 0) {
+ queries.push(key + "=" + value);
+ }
+ };
+ return SASQueryParameters;
+}());
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual
+ * REST request.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
+ *
+ * @param {AccountSASSignatureValues} accountSASSignatureValues
+ * @param {StorageSharedKeyCredential} sharedKeyCredential
+ * @returns {SASQueryParameters}
+ * @memberof AccountSASSignatureValues
+ */
+function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) {
+ var version = accountSASSignatureValues.version
+ ? accountSASSignatureValues.version
+ : SERVICE_VERSION;
+ if (accountSASSignatureValues.permissions &&
+ accountSASSignatureValues.permissions.deleteVersion &&
+ version < "2019-10-10") {
+ throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission.");
+ }
+ if (accountSASSignatureValues.permissions &&
+ accountSASSignatureValues.permissions.tag &&
+ version < "2019-12-12") {
+ throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission.");
+ }
+ if (accountSASSignatureValues.permissions &&
+ accountSASSignatureValues.permissions.filter &&
+ version < "2019-12-12") {
+ throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission.");
+ }
+ var parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString());
+ var parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString();
+ var parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString();
+ var stringToSign = [
+ sharedKeyCredential.accountName,
+ parsedPermissions,
+ parsedServices,
+ parsedResourceTypes,
+ accountSASSignatureValues.startsOn
+ ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)
+ : "",
+ truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),
+ accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "",
+ accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "",
+ version,
+ "" // Account SAS requires an additional newline character
+ ].join("\n");
+ var signature = sharedKeyCredential.computeHMACSHA256(stringToSign);
+ return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange);
+}
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting
+ * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all
+ * the values are set, this should be serialized with toString and set as the permissions field on a
+ * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but
+ * the order of the permissions is particular and this class guarantees correctness.
+ *
+ * @export
+ * @class BlobSASPermissions
+ */
+var BlobSASPermissions = /** @class */ (function () {
+ function BlobSASPermissions() {
+ /**
+ * Specifies Read access granted.
+ *
+ * @type {boolean}
+ * @memberof BlobSASPermissions
+ */
+ this.read = false;
+ /**
+ * Specifies Add access granted.
+ *
+ * @type {boolean}
+ * @memberof BlobSASPermissions
+ */
+ this.add = false;
+ /**
+ * Specifies Create access granted.
+ *
+ * @type {boolean}
+ * @memberof BlobSASPermissions
+ */
+ this.create = false;
+ /**
+ * Specifies Write access granted.
+ *
+ * @type {boolean}
+ * @memberof BlobSASPermissions
+ */
+ this.write = false;
+ /**
+ * Specifies Delete access granted.
+ *
+ * @type {boolean}
+ * @memberof BlobSASPermissions
+ */
+ this.delete = false;
+ /**
+ * Specifies Delete version access granted.
+ *
+ * @type {boolean}
+ * @memberof BlobSASPermissions
+ */
+ this.deleteVersion = false;
+ /**
+ * Specfies Tag access granted.
+ *
+ * @type {boolean}
+ * @memberof BlobSASPermissions
+ */
+ this.tag = false;
+ }
+ /**
+ * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an
+ * Error if it encounters a character that does not correspond to a valid permission.
+ *
+ * @static
+ * @param {string} permissions
+ * @returns {BlobSASPermissions}
+ * @memberof BlobSASPermissions
+ */
+ BlobSASPermissions.parse = function (permissions) {
+ var blobSASPermissions = new BlobSASPermissions();
+ for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) {
+ var char = permissions_1[_i];
+ switch (char) {
+ case "r":
+ blobSASPermissions.read = true;
+ break;
+ case "a":
+ blobSASPermissions.add = true;
+ break;
+ case "c":
+ blobSASPermissions.create = true;
+ break;
+ case "w":
+ blobSASPermissions.write = true;
+ break;
+ case "d":
+ blobSASPermissions.delete = true;
+ break;
+ case "x":
+ blobSASPermissions.deleteVersion = true;
+ break;
+ case "t":
+ blobSASPermissions.tag = true;
+ break;
+ default:
+ throw new RangeError("Invalid permission: " + char);
+ }
+ }
+ return blobSASPermissions;
+ };
+ /**
+ * Converts the given permissions to a string. Using this method will guarantee the permissions are in an
+ * order accepted by the service.
+ *
+ * @returns {string} A string which represents the BlobSASPermissions
+ * @memberof BlobSASPermissions
+ */
+ BlobSASPermissions.prototype.toString = function () {
+ var permissions = [];
+ if (this.read) {
+ permissions.push("r");
+ }
+ if (this.add) {
+ permissions.push("a");
+ }
+ if (this.create) {
+ permissions.push("c");
+ }
+ if (this.write) {
+ permissions.push("w");
+ }
+ if (this.delete) {
+ permissions.push("d");
+ }
+ if (this.deleteVersion) {
+ permissions.push("x");
+ }
+ if (this.tag) {
+ permissions.push("t");
+ }
+ return permissions.join("");
+ };
+ return BlobSASPermissions;
+}());
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+/**
+ * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container.
+ * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation.
+ * Once all the values are set, this should be serialized with toString and set as the permissions field on a
+ * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but
+ * the order of the permissions is particular and this class guarantees correctness.
+ *
+ * @export
+ * @class ContainerSASPermissions
+ */
+var ContainerSASPermissions = /** @class */ (function () {
+ function ContainerSASPermissions() {
+ /**
+ * Specifies Read access granted.
+ *
+ * @type {boolean}
+ * @memberof ContainerSASPermissions
+ */
+ this.read = false;
+ /**
+ * Specifies Add access granted.
+ *
+ * @type {boolean}
+ * @memberof ContainerSASPermissions
+ */
+ this.add = false;
+ /**
+ * Specifies Create access granted.
+ *
+ * @type {boolean}
+ * @memberof ContainerSASPermissions
+ */
+ this.create = false;
+ /**
+ * Specifies Write access granted.
+ *
+ * @type {boolean}
+ * @memberof ContainerSASPermissions
+ */
+ this.write = false;
+ /**
+ * Specifies Delete access granted.
+ *
+ * @type {boolean}
+ * @memberof ContainerSASPermissions
+ */
+ this.delete = false;
+ /**
+ * Specifies Delete version access granted.
+ *
+ * @type {boolean}
+ * @memberof ContainerSASPermissions
+ */
+ this.deleteVersion = false;
+ /**
+ * Specifies List access granted.
+ *
+ * @type {boolean}
+ * @memberof ContainerSASPermissions
+ */
+ this.list = false;
+ /**
+ * Specfies Tag access granted.
+ *
+ * @type {boolean}
+ * @memberof ContainerSASPermissions
+ */
+ this.tag = false;
+ }
+ /**
+ * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an
+ * Error if it encounters a character that does not correspond to a valid permission.
+ *
+ * @static
+ * @param {string} permissions
+ * @returns {ContainerSASPermissions}
+ * @memberof ContainerSASPermissions
+ */
+ ContainerSASPermissions.parse = function (permissions) {
+ var containerSASPermissions = new ContainerSASPermissions();
+ for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) {
+ var char = permissions_1[_i];
+ switch (char) {
+ case "r":
+ containerSASPermissions.read = true;
+ break;
+ case "a":
+ containerSASPermissions.add = true;
+ break;
+ case "c":
+ containerSASPermissions.create = true;
+ break;
+ case "w":
+ containerSASPermissions.write = true;
+ break;
+ case "d":
+ containerSASPermissions.delete = true;
+ break;
+ case "l":
+ containerSASPermissions.list = true;
+ break;
+ case "t":
+ containerSASPermissions.tag = true;
+ break;
+ case "x":
+ containerSASPermissions.deleteVersion = true;
+ break;
+ default:
+ throw new RangeError("Invalid permission " + char);
+ }
+ }
+ return containerSASPermissions;
+ };
+ /**
+ * Converts the given permissions to a string. Using this method will guarantee the permissions are in an
+ * order accepted by the service.
+ *
+ * The order of the characters should be as specified here to ensure correctness.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
+ *
+ * @returns {string}
+ * @memberof ContainerSASPermissions
+ */
+ ContainerSASPermissions.prototype.toString = function () {
+ var permissions = [];
+ if (this.read) {
+ permissions.push("r");
+ }
+ if (this.add) {
+ permissions.push("a");
+ }
+ if (this.create) {
+ permissions.push("c");
+ }
+ if (this.write) {
+ permissions.push("w");
+ }
+ if (this.delete) {
+ permissions.push("d");
+ }
+ if (this.deleteVersion) {
+ permissions.push("x");
+ }
+ if (this.list) {
+ permissions.push("l");
+ }
+ if (this.tag) {
+ permissions.push("t");
+ }
+ return permissions.join("");
+ };
+ return ContainerSASPermissions;
+}());
+
/**
 * ONLY AVAILABLE IN NODE.JS RUNTIME.
 *
 * UserDelegationKeyCredential is only used for generation of user delegation SAS.
 * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas
 *
 * @export
 * @class UserDelegationKeyCredential
 */
var UserDelegationKeyCredential = /** @class */ (function () {
    /**
     * Creates an instance of UserDelegationKeyCredential.
     * @param {string} accountName
     * @param {UserDelegationKey} userDelegationKey
     * @memberof UserDelegationKeyCredential
     */
    function UserDelegationKeyCredential(accountName, userDelegationKey) {
        this.accountName = accountName;
        this.userDelegationKey = userDelegationKey;
        // Decode the base64-encoded key once so every signing call reuses the raw bytes.
        this.key = Buffer.from(userDelegationKey.value, "base64");
    }
    /**
     * Generates a hash signature for an HTTP request or for a SAS.
     *
     * @param {string} stringToSign
     * @returns {string} Base64-encoded HMAC-SHA256 of stringToSign.
     * @memberof UserDelegationKeyCredential
     */
    UserDelegationKeyCredential.prototype.computeHMACSHA256 = function (stringToSign) {
        var hmac = crypto.createHmac("sha256", this.key);
        hmac.update(stringToSign, "utf8");
        return hmac.digest("base64");
    };
    return UserDelegationKeyCredential;
}());
+
// Copyright (c) Microsoft Corporation. All rights reserved.
/**
 * Generates blob service SAS query parameters, signed either with a
 * StorageSharedKeyCredential or with a user delegation key.
 *
 * @param {BlobSASSignatureValues} blobSASSignatureValues
 * @param {StorageSharedKeyCredential|UserDelegationKey} sharedKeyCredentialOrUserDelegationKey
 * @param {string} [accountName] - required when a user delegation key is supplied
 * @returns {SASQueryParameters}
 */
function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) {
    var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;
    var sharedKeyCredential;
    var userDelegationKeyCredential;
    if (sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential) {
        sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey;
    }
    else if (accountName !== undefined) {
        // Second argument is not a shared key credential, so treat it as a user delegation key.
        userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey);
    }
    if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) {
        throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName.");
    }
    // Version 2019-12-12 adds support for the blob tags permission.
    // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields.
    // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string
    if (version >= "2018-11-09") {
        return sharedKeyCredential !== undefined
            ? generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential)
            : generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential);
    }
    if (version >= "2015-04-05") {
        if (sharedKeyCredential === undefined) {
            throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key.");
        }
        return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential);
    }
    throw new RangeError("'version' must be >= '2015-04-05'.");
}
/**
 * ONLY AVAILABLE IN NODE.JS RUNTIME.
 * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09.
 *
 * Creates an instance of SASQueryParameters.
 *
 * Only accepts required settings needed to create a SAS. For optional settings please
 * set corresponding properties directly, such as permissions, startsOn and identifier.
 *
 * WARNING: When identifier is not provided, permissions and expiresOn are required.
 * You MUST assign value to identifier or expiresOn & permissions manually if you initialize with
 * this constructor.
 *
 * @param {BlobSASSignatureValues} blobSASSignatureValues
 * @param {StorageSharedKeyCredential} sharedKeyCredential
 * @returns {SASQueryParameters}
 */
function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) {
    // Either an identifier (stored access policy) or permissions + expiresOn must be present.
    if (!blobSASSignatureValues.identifier &&
        !blobSASSignatureValues.permissions &&
        !blobSASSignatureValues.expiresOn) {
        throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.");
    }
    var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;
    var resource = "c"; // "c" = container; switched to "b" below when a blob name is given
    var verifiedPermissions;
    // Features that require a newer service version are rejected up front.
    if (blobSASSignatureValues.snapshotTime) {
        throw RangeError("'version' must be >= '2018-11-09' when provided 'snapshotTime'.");
    }
    if (blobSASSignatureValues.versionId) {
        throw RangeError("'version' must be >= '2019-10-10' when provided 'versionId'.");
    }
    if (blobSASSignatureValues.permissions &&
        blobSASSignatureValues.permissions.deleteVersion &&
        version < "2019-10-10") {
        throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission.");
    }
    if (blobSASSignatureValues.permissions &&
        blobSASSignatureValues.permissions.tag &&
        version < "2019-12-12") {
        throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission.");
    }
    if (blobSASSignatureValues.blobName) {
        resource = "b";
    }
    // Calling parse and toString guarantees the proper ordering and throws on invalid characters.
    if (blobSASSignatureValues.permissions) {
        if (blobSASSignatureValues.blobName) {
            verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
        }
        else {
            verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
        }
    }
    // Signature is generated on the un-url-encoded values.
    // Field order below is mandated by the service's string-to-sign format; do not reorder.
    var stringToSign = [
        verifiedPermissions ? verifiedPermissions : "",
        blobSASSignatureValues.startsOn
            ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)
            : "",
        blobSASSignatureValues.expiresOn
            ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)
            : "",
        getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),
        blobSASSignatureValues.identifier,
        blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "",
        blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "",
        version,
        blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "",
        blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "",
        blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "",
        blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "",
        blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : ""
    ].join("\n");
    var signature = sharedKeyCredential.computeHMACSHA256(stringToSign);
    return new SASQueryParameters(version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType);
}
/**
 * ONLY AVAILABLE IN NODE.JS RUNTIME.
 * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.
 *
 * Creates an instance of SASQueryParameters.
 *
 * Only accepts required settings needed to create a SAS. For optional settings please
 * set corresponding properties directly, such as permissions, startsOn and identifier.
 *
 * WARNING: When identifier is not provided, permissions and expiresOn are required.
 * You MUST assign value to identifier or expiresOn & permissions manually if you initialize with
 * this constructor.
 *
 * @param {BlobSASSignatureValues} blobSASSignatureValues
 * @param {StorageSharedKeyCredential} sharedKeyCredential
 * @returns {SASQueryParameters}
 */
function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) {
    // Either an identifier (stored access policy) or permissions + expiresOn must be present.
    if (!blobSASSignatureValues.identifier &&
        !blobSASSignatureValues.permissions &&
        !blobSASSignatureValues.expiresOn) {
        throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.");
    }
    var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;
    var resource = "c"; // "c" = container; refined below to "b"/"bs"/"bv" for blob resources
    var verifiedPermissions;
    // Features that require a newer service version are rejected up front.
    if (blobSASSignatureValues.versionId && version < "2019-10-10") {
        throw RangeError("'version' must be >= '2019-10-10' when provided 'versionId'.");
    }
    if (blobSASSignatureValues.permissions &&
        blobSASSignatureValues.permissions.deleteVersion &&
        version < "2019-10-10") {
        throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission.");
    }
    if (blobSASSignatureValues.permissions &&
        blobSASSignatureValues.permissions.tag &&
        version < "2019-12-12") {
        throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission.");
    }
    if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) {
        throw RangeError("Must provide 'blobName' when provided 'snapshotTime'.");
    }
    if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) {
        throw RangeError("Must provide 'blobName' when provided 'versionId'.");
    }
    // The signed timestamp field carries the snapshot time, or the version id for "bv".
    var timestamp = blobSASSignatureValues.snapshotTime;
    if (blobSASSignatureValues.blobName) {
        resource = "b";
        if (blobSASSignatureValues.snapshotTime) {
            resource = "bs"; // blob snapshot
        }
        else if (blobSASSignatureValues.versionId) {
            resource = "bv"; // blob version
            timestamp = blobSASSignatureValues.versionId;
        }
    }
    // Calling parse and toString guarantees the proper ordering and throws on invalid characters.
    if (blobSASSignatureValues.permissions) {
        if (blobSASSignatureValues.blobName) {
            verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
        }
        else {
            verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
        }
    }
    // Signature is generated on the un-url-encoded values.
    // Field order below is mandated by the service's string-to-sign format; do not reorder.
    var stringToSign = [
        verifiedPermissions ? verifiedPermissions : "",
        blobSASSignatureValues.startsOn
            ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)
            : "",
        blobSASSignatureValues.expiresOn
            ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)
            : "",
        getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),
        blobSASSignatureValues.identifier,
        blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "",
        blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "",
        version,
        resource,
        timestamp,
        blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "",
        blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "",
        blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "",
        blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "",
        blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : ""
    ].join("\n");
    var signature = sharedKeyCredential.computeHMACSHA256(stringToSign);
    return new SASQueryParameters(version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType);
}
/**
 * ONLY AVAILABLE IN NODE.JS RUNTIME.
 * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.
 *
 * Creates an instance of SASQueryParameters.
 *
 * Only accepts required settings needed to create a SAS. For optional settings please
 * set corresponding properties directly, such as permissions, startsOn and identifier.
 *
 * WARNING: identifier will be ignored, permissions and expiresOn are required.
 *
 * @param {BlobSASSignatureValues} blobSASSignatureValues
 * @param {UserDelegationKeyCredential} userDelegationKeyCredential
 * @returns {SASQueryParameters}
 */
function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) {
    // User delegation SAS cannot rely on a stored access policy, so both are mandatory.
    if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {
        throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.");
    }
    var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;
    // Features that require a newer service version are rejected up front.
    if (blobSASSignatureValues.versionId && version < "2019-10-10") {
        throw RangeError("'version' must be >= '2019-10-10' when provided 'versionId'.");
    }
    if (blobSASSignatureValues.permissions &&
        blobSASSignatureValues.permissions.deleteVersion &&
        version < "2019-10-10") {
        throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission.");
    }
    if (blobSASSignatureValues.permissions &&
        blobSASSignatureValues.permissions.tag &&
        version < "2019-12-12") {
        throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission.");
    }
    var resource = "c"; // "c" = container; refined below to "b"/"bs"/"bv" for blob resources
    var verifiedPermissions;
    if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) {
        throw RangeError("Must provide 'blobName' when provided 'snapshotTime'.");
    }
    if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) {
        throw RangeError("Must provide 'blobName' when provided 'versionId'.");
    }
    // The signed timestamp field carries the snapshot time, or the version id for "bv".
    var timestamp = blobSASSignatureValues.snapshotTime;
    if (blobSASSignatureValues.blobName) {
        resource = "b";
        if (blobSASSignatureValues.snapshotTime) {
            resource = "bs"; // blob snapshot
        }
        else if (blobSASSignatureValues.versionId) {
            resource = "bv"; // blob version
            timestamp = blobSASSignatureValues.versionId;
        }
    }
    // Calling parse and toString guarantees the proper ordering and throws on invalid characters.
    if (blobSASSignatureValues.permissions) {
        if (blobSASSignatureValues.blobName) {
            verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
        }
        else {
            verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
        }
    }
    // Signature is generated on the un-url-encoded values.
    // Field order below is mandated by the service's string-to-sign format; do not reorder.
    var stringToSign = [
        verifiedPermissions ? verifiedPermissions : "",
        blobSASSignatureValues.startsOn
            ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)
            : "",
        blobSASSignatureValues.expiresOn
            ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)
            : "",
        getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),
        userDelegationKeyCredential.userDelegationKey.signedObjectId,
        userDelegationKeyCredential.userDelegationKey.signedTenantId,
        userDelegationKeyCredential.userDelegationKey.signedStartsOn
            ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)
            : "",
        userDelegationKeyCredential.userDelegationKey.signedExpiresOn
            ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)
            : "",
        userDelegationKeyCredential.userDelegationKey.signedService,
        userDelegationKeyCredential.userDelegationKey.signedVersion,
        blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "",
        blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "",
        version,
        resource,
        timestamp,
        // NOTE: undefined entries serialize as "" in Array.prototype.join, which matches
        // the explicit `x ? x : ""` form used by the shared-key variants above.
        blobSASSignatureValues.cacheControl,
        blobSASSignatureValues.contentDisposition,
        blobSASSignatureValues.contentEncoding,
        blobSASSignatureValues.contentLanguage,
        blobSASSignatureValues.contentType
    ].join("\n");
    var signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);
    return new SASQueryParameters(version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey);
}
/**
 * Builds the canonical resource path used in the string-to-sign.
 *
 * @param {string} accountName
 * @param {string} containerName
 * @param {string} [blobName] When omitted/empty, the container path is returned.
 * @returns {string}
 */
function getCanonicalName(accountName, containerName, blobName) {
    // Container: "/blob/account/containerName"
    // Blob:      "/blob/account/containerName/blobName"
    var canonicalName = "/blob/" + accountName + "/" + containerName;
    if (blobName) {
        canonicalName += "/" + blobName;
    }
    return canonicalName;
}
+
// Lazy pass-through re-exports: each getter forwards to the corresponding
// value on coreHttp at access time.
Object.defineProperty(exports, 'BaseRequestPolicy', {
    enumerable: true,
    get: function () {
        return coreHttp.BaseRequestPolicy;
    }
});
Object.defineProperty(exports, 'HttpHeaders', {
    enumerable: true,
    get: function () {
        return coreHttp.HttpHeaders;
    }
});
Object.defineProperty(exports, 'RequestPolicyOptions', {
    enumerable: true,
    get: function () {
        return coreHttp.RequestPolicyOptions;
    }
});
Object.defineProperty(exports, 'RestError', {
    enumerable: true,
    get: function () {
        return coreHttp.RestError;
    }
});
Object.defineProperty(exports, 'WebResource', {
    enumerable: true,
    get: function () {
        return coreHttp.WebResource;
    }
});
Object.defineProperty(exports, 'deserializationPolicy', {
    enumerable: true,
    get: function () {
        return coreHttp.deserializationPolicy;
    }
});
// Public API surface of this bundled module.
exports.AccountSASPermissions = AccountSASPermissions;
exports.AccountSASResourceTypes = AccountSASResourceTypes;
exports.AccountSASServices = AccountSASServices;
exports.AnonymousCredential = AnonymousCredential;
exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy;
exports.AppendBlobClient = AppendBlobClient;
exports.BlobBatch = BlobBatch;
exports.BlobBatchClient = BlobBatchClient;
exports.BlobClient = BlobClient;
exports.BlobLeaseClient = BlobLeaseClient;
exports.BlobSASPermissions = BlobSASPermissions;
exports.BlobServiceClient = BlobServiceClient;
exports.BlockBlobClient = BlockBlobClient;
exports.ContainerClient = ContainerClient;
exports.ContainerSASPermissions = ContainerSASPermissions;
exports.Credential = Credential;
exports.CredentialPolicy = CredentialPolicy;
exports.PageBlobClient = PageBlobClient;
exports.Pipeline = Pipeline;
exports.SASQueryParameters = SASQueryParameters;
exports.StorageBrowserPolicy = StorageBrowserPolicy;
exports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory;
exports.StorageOAuthScopes = StorageOAuthScopes;
exports.StorageRetryPolicy = StorageRetryPolicy;
exports.StorageRetryPolicyFactory = StorageRetryPolicyFactory;
exports.StorageSharedKeyCredential = StorageSharedKeyCredential;
exports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy;
exports.generateAccountSASQueryParameters = generateAccountSASQueryParameters;
exports.generateBlobSASQueryParameters = generateBlobSASQueryParameters;
exports.logger = logger;
exports.newPipeline = newPipeline;
//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+/* 374 */
/***/ (function(module, __unusedexports, __webpack_require__) {

"use strict";


// Alias module: re-exports module 990 unchanged.
// NOTE(review): presumably kept so an older require path keeps resolving — confirm against the bundler's module map.
module.exports = __webpack_require__(990)
+
+
+/***/ }),
+/* 375 */,
+/* 376 */,
+/* 377 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
// bluebird internal module: builds the thenable-coercion helper used by the core.
module.exports = function(Promise, INTERNAL) {
var util = __webpack_require__(248);
var errorObj = util.errorObj;
var isObject = util.isObject;

// Coerces an arbitrary value into a trusted bluebird Promise.
// Non-objects and objects without a callable `then` are returned unchanged.
function tryConvertToPromise(obj, context) {
    if (isObject(obj)) {
        if (obj instanceof Promise) return obj;
        var then = getThen(obj);
        // errorObj is util's sentinel: reading `obj.then` threw, so reject with that error.
        if (then === errorObj) {
            if (context) context._pushContext();
            var ret = Promise.reject(then.e);
            if (context) context._popContext();
            return ret;
        } else if (typeof then === "function") {
            // Promise from another copy of bluebird: use the fast internal _then path.
            if (isAnyBluebirdPromise(obj)) {
                var ret = new Promise(INTERNAL);
                obj._then(
                    ret._fulfill,
                    ret._reject,
                    undefined,
                    ret,
                    null
                );
                return ret;
            }
            return doThenable(obj, then, context);
        }
    }
    return obj;
}

function doGetThen(obj) {
    return obj.then;
}

// Reads `obj.then`, trapping getter exceptions into the errorObj sentinel.
function getThen(obj) {
    try {
        return doGetThen(obj);
    } catch (e) {
        errorObj.e = e;
        return errorObj;
    }
}

var hasProp = {}.hasOwnProperty;
// Duck-type check for promises created by a different bluebird instance.
function isAnyBluebirdPromise(obj) {
    try {
        return hasProp.call(obj, "_promise0");
    } catch (e) {
        return false;
    }
}

// Wraps a generic thenable `x` (whose then-method is `then`) in a new bluebird promise.
function doThenable(x, then, context) {
    var promise = new Promise(INTERNAL);
    var ret = promise;
    if (context) context._pushContext();
    promise._captureStackTrace();
    if (context) context._popContext();
    // `synchronous` marks rejections raised while then() is still executing.
    var synchronous = true;
    var result = util.tryCatch(then).call(x, resolve, reject);
    synchronous = false;

    if (promise && result === errorObj) {
        promise._rejectCallback(result.e, true, true);
        promise = null;
    }

    // resolve/reject are one-shot: `promise` is nulled after the first settle.
    function resolve(value) {
        if (!promise) return;
        promise._resolveCallback(value);
        promise = null;
    }

    function reject(reason) {
        if (!promise) return;
        promise._rejectCallback(reason, synchronous, true);
        promise = null;
    }
    return ret;
}

return tryConvertToPromise;
};
+
+
+/***/ }),
+/* 378 */,
+/* 379 */,
+/* 380 */,
+/* 381 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
// yallist: a doubly-linked list with an Array-like API. Methods that share a
// name with an Array method behave analogously, but operate on list nodes.
module.exports = Yallist

Yallist.Node = Node
Yallist.create = Yallist

// Constructor; callable with or without `new`. Accepts anything with a
// forEach method, or individual items as positional arguments.
function Yallist (list) {
  var self = this
  if (!(self instanceof Yallist)) {
    self = new Yallist()
  }

  self.tail = null
  self.head = null
  self.length = 0

  if (list && typeof list.forEach === 'function') {
    list.forEach(function (item) {
      self.push(item)
    })
  } else if (arguments.length > 0) {
    for (var i = 0, l = arguments.length; i < l; i++) {
      self.push(arguments[i])
    }
  }

  return self
}

// Unlinks `node` from this list and returns the node that followed it.
// Throws if the node belongs to a different list.
Yallist.prototype.removeNode = function (node) {
  if (node.list !== this) {
    throw new Error('removing node which does not belong to this list')
  }

  var next = node.next
  var prev = node.prev

  if (next) {
    next.prev = prev
  }

  if (prev) {
    prev.next = next
  }

  if (node === this.head) {
    this.head = next
  }
  if (node === this.tail) {
    this.tail = prev
  }

  node.list.length--
  node.next = null
  node.prev = null
  node.list = null

  return next
}

// Moves an existing Node object to the front of this list, detaching it
// from its current list first if necessary.
Yallist.prototype.unshiftNode = function (node) {
  if (node === this.head) {
    return
  }

  if (node.list) {
    node.list.removeNode(node)
  }

  var head = this.head
  node.list = this
  node.next = head
  if (head) {
    head.prev = node
  }

  this.head = node
  if (!this.tail) {
    this.tail = node
  }
  this.length++
}

// Moves an existing Node object to the back of this list, detaching it
// from its current list first if necessary.
Yallist.prototype.pushNode = function (node) {
  if (node === this.tail) {
    return
  }

  if (node.list) {
    node.list.removeNode(node)
  }

  var tail = this.tail
  node.list = this
  node.prev = tail
  if (tail) {
    tail.next = node
  }

  this.tail = node
  if (!this.head) {
    this.head = node
  }
  this.length++
}

// Appends each argument as a new value; returns the new length.
Yallist.prototype.push = function () {
  for (var i = 0, l = arguments.length; i < l; i++) {
    push(this, arguments[i])
  }
  return this.length
}

// Prepends each argument as a new value; returns the new length.
Yallist.prototype.unshift = function () {
  for (var i = 0, l = arguments.length; i < l; i++) {
    unshift(this, arguments[i])
  }
  return this.length
}

// Removes and returns the last value, or undefined when empty.
Yallist.prototype.pop = function () {
  if (!this.tail) {
    return undefined
  }

  var res = this.tail.value
  this.tail = this.tail.prev
  if (this.tail) {
    this.tail.next = null
  } else {
    this.head = null
  }
  this.length--
  return res
}

// Removes and returns the first value, or undefined when empty.
Yallist.prototype.shift = function () {
  if (!this.head) {
    return undefined
  }

  var res = this.head.value
  this.head = this.head.next
  if (this.head) {
    this.head.prev = null
  } else {
    this.tail = null
  }
  this.length--
  return res
}

// Calls fn(value, index, list) for each value, head to tail.
Yallist.prototype.forEach = function (fn, thisp) {
  thisp = thisp || this
  for (var walker = this.head, i = 0; walker !== null; i++) {
    fn.call(thisp, walker.value, i, this)
    walker = walker.next
  }
}

// Calls fn(value, index, list) for each value, tail to head.
Yallist.prototype.forEachReverse = function (fn, thisp) {
  thisp = thisp || this
  for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
    fn.call(thisp, walker.value, i, this)
    walker = walker.prev
  }
}

// Returns the value at index n from the head, or undefined if out of range.
Yallist.prototype.get = function (n) {
  for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
    // abort out of the list early if we hit a cycle
    walker = walker.next
  }
  if (i === n && walker !== null) {
    return walker.value
  }
}

// Returns the value at index n from the tail, or undefined if out of range.
Yallist.prototype.getReverse = function (n) {
  for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
    // abort out of the list early if we hit a cycle
    walker = walker.prev
  }
  if (i === n && walker !== null) {
    return walker.value
  }
}

// Returns a new Yallist of fn(value, list), head to tail.
// NOTE: unlike Array#map, the callback receives (value, list), not an index.
Yallist.prototype.map = function (fn, thisp) {
  thisp = thisp || this
  var res = new Yallist()
  for (var walker = this.head; walker !== null;) {
    res.push(fn.call(thisp, walker.value, this))
    walker = walker.next
  }
  return res
}

// Like map, but walks tail to head.
Yallist.prototype.mapReverse = function (fn, thisp) {
  thisp = thisp || this
  var res = new Yallist()
  for (var walker = this.tail; walker !== null;) {
    res.push(fn.call(thisp, walker.value, this))
    walker = walker.prev
  }
  return res
}

// Array#reduce semantics, head to tail; throws on empty list with no initial value.
Yallist.prototype.reduce = function (fn, initial) {
  var acc
  var walker = this.head
  if (arguments.length > 1) {
    acc = initial
  } else if (this.head) {
    walker = this.head.next
    acc = this.head.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }

  for (var i = 0; walker !== null; i++) {
    acc = fn(acc, walker.value, i)
    walker = walker.next
  }

  return acc
}

// Array#reduceRight semantics, tail to head.
Yallist.prototype.reduceReverse = function (fn, initial) {
  var acc
  var walker = this.tail
  if (arguments.length > 1) {
    acc = initial
  } else if (this.tail) {
    walker = this.tail.prev
    acc = this.tail.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }

  for (var i = this.length - 1; walker !== null; i--) {
    acc = fn(acc, walker.value, i)
    walker = walker.prev
  }

  return acc
}

// Copies all values into a plain array, head first.
Yallist.prototype.toArray = function () {
  var arr = new Array(this.length)
  for (var i = 0, walker = this.head; walker !== null; i++) {
    arr[i] = walker.value
    walker = walker.next
  }
  return arr
}

// Copies all values into a plain array, tail first.
Yallist.prototype.toArrayReverse = function () {
  var arr = new Array(this.length)
  for (var i = 0, walker = this.tail; walker !== null; i++) {
    arr[i] = walker.value
    walker = walker.prev
  }
  return arr
}

// Returns a new Yallist holding values [from, to), Array#slice-style
// (negative indices count from the end).
Yallist.prototype.slice = function (from, to) {
  to = to || this.length
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }
  var ret = new Yallist()
  if (to < from || to < 0) {
    return ret
  }
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }
  for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
    walker = walker.next
  }
  for (; walker !== null && i < to; i++, walker = walker.next) {
    ret.push(walker.value)
  }
  return ret
}

// Like slice, but the result is built walking tail to head, so the
// returned list holds the slice's values in reverse order.
Yallist.prototype.sliceReverse = function (from, to) {
  to = to || this.length
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }
  var ret = new Yallist()
  if (to < from || to < 0) {
    return ret
  }
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }
  for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
    walker = walker.prev
  }
  for (; walker !== null && i > from; i--, walker = walker.prev) {
    ret.push(walker.value)
  }
  return ret
}

// Array#splice analogue: removes deleteCount values starting at `start`,
// inserts `nodes` values there, and returns the removed values as an array.
Yallist.prototype.splice = function (start, deleteCount, ...nodes) {
  if (start > this.length) {
    start = this.length - 1
  }
  if (start < 0) {
    start = this.length + start;
  }

  for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
    walker = walker.next
  }

  var ret = []
  for (var i = 0; walker && i < deleteCount; i++) {
    ret.push(walker.value)
    walker = this.removeNode(walker)
  }
  // removeNode returned null past the end; re-anchor the insertion point.
  if (walker === null) {
    walker = this.tail
  }

  if (walker !== this.head && walker !== this.tail) {
    walker = walker.prev
  }

  for (var i = 0; i < nodes.length; i++) {
    walker = insert(this, walker, nodes[i])
  }
  return ret;
}

// Reverses the list in place by swapping prev/next on every node.
Yallist.prototype.reverse = function () {
  var head = this.head
  var tail = this.tail
  for (var walker = head; walker !== null; walker = walker.prev) {
    var p = walker.prev
    walker.prev = walker.next
    walker.next = p
  }
  this.head = tail
  this.tail = head
  return this
}

// Inserts `value` after `node` (or at the head when node is the head),
// fixing up head/tail pointers; returns the new Node.
function insert (self, node, value) {
  var inserted = node === self.head ?
    new Node(value, null, node, self) :
    new Node(value, node, node.next, self)

  if (inserted.next === null) {
    self.tail = inserted
  }
  if (inserted.prev === null) {
    self.head = inserted
  }

  self.length++

  return inserted
}

// Appends a single value as the new tail.
function push (self, item) {
  self.tail = new Node(item, self.tail, null, self)
  if (!self.head) {
    self.head = self.tail
  }
  self.length++
}

// Prepends a single value as the new head.
function unshift (self, item) {
  self.head = new Node(item, null, self.head, self)
  if (!self.tail) {
    self.tail = self.head
  }
  self.length++
}

// List node; links itself between prev and next on construction.
function Node (value, prev, next, list) {
  if (!(this instanceof Node)) {
    return new Node(value, prev, next, list)
  }

  this.list = list
  this.value = value

  if (prev) {
    prev.next = this
    this.prev = prev
  } else {
    this.prev = null
  }

  if (next) {
    next.prev = this
    this.next = next
  } else {
    this.next = null
  }
}

try {
  // add if support for Symbol.iterator is present
  __webpack_require__(135)(Yallist)
} catch (er) {}
+
+
+/***/ }),
+/* 382 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+var iconvLite = __webpack_require__(841);
+
+// Expose to the world
+module.exports.convert = convert;
+
/**
 * Convert encoding of an UTF-8 string or a buffer
 *
 * @param {String|Buffer} str String to be converted
 * @param {String} to Encoding to be converted to
 * @param {String} [from='UTF-8'] Encoding to be converted from
 * @return {Buffer} Encoded string
 */
function convert(str, to, from) {
    from = checkEncoding(from || 'UTF-8');
    to = checkEncoding(to || 'UTF-8');
    str = str || '';

    // A string arriving in a non-UTF-8 source encoding is treated as raw bytes.
    if (from !== 'UTF-8' && typeof str === 'string') {
        str = Buffer.from(str, 'binary');
    }

    var result;
    if (from === to) {
        // No conversion needed; only normalize strings into Buffers.
        result = typeof str === 'string' ? Buffer.from(str) : str;
    } else {
        try {
            result = convertIconvLite(str, to, from);
        } catch (E) {
            // Best effort: log the failure and fall back to the unconverted input.
            console.error(E);
            result = str;
        }
    }

    if (typeof result === 'string') {
        result = Buffer.from(result, 'utf-8');
    }

    return result;
}
+
/**
 * Convert encoding of a string with iconv-lite
 *
 * @param {String|Buffer} str String to be converted
 * @param {String} to Encoding to be converted to
 * @param {String} [from='UTF-8'] Encoding to be converted from
 * @return {Buffer} Encoded string
 */
function convertIconvLite(str, to, from) {
    if (to === 'UTF-8') {
        return iconvLite.decode(str, from);
    }
    if (from === 'UTF-8') {
        return iconvLite.encode(str, to);
    }
    // Neither side is UTF-8: pivot through a decoded intermediate string.
    return iconvLite.encode(iconvLite.decode(str, from), to);
}
+
+/**
+ * Converts charset name if needed
+ *
+ * @param {String} name Character set
+ * @return {String} Character set name
+ */
+function checkEncoding(name) {
+ return (name || '')
+ .toString()
+ .trim()
+ .replace(/^latin[\-_]?(\d+)$/i, 'ISO-8859-$1')
+ .replace(/^win(?:dows)?[\-_]?(\d+)$/i, 'WINDOWS-$1')
+ .replace(/^utf[\-_]?(\d+)$/i, 'UTF-$1')
+ .replace(/^ks_c_5601\-1987$/i, 'CP949')
+ .replace(/^us[\-_]?ascii$/i, 'ASCII')
+ .toUpperCase();
+}
+
+
+/***/ }),
+/* 383 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", { value: true });
+const assert = __webpack_require__(357);
+const path = __webpack_require__(622);
+const pathHelper = __webpack_require__(972);
+const IS_WINDOWS = process.platform === 'win32';
+/**
+ * Helper class for parsing paths into segments
+ */
+class Path {
+ /**
+ * Constructs a Path
+ * @param itemPath Path or array of segments
+ */
+ constructor(itemPath) {
+ this.segments = [];
+ // String
+ if (typeof itemPath === 'string') {
+ assert(itemPath, `Parameter 'itemPath' must not be empty`);
+ // Normalize slashes and trim unnecessary trailing slash
+ itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
+ // Not rooted
+ if (!pathHelper.hasRoot(itemPath)) {
+ this.segments = itemPath.split(path.sep);
+ }
+ // Rooted
+ else {
+ // Add all segments, while not at the root
+ let remaining = itemPath;
+ let dir = pathHelper.dirname(remaining);
+ while (dir !== remaining) {
+ // Add the segment
+ const basename = path.basename(remaining);
+ this.segments.unshift(basename);
+ // Truncate the last segment
+ remaining = dir;
+ dir = pathHelper.dirname(remaining);
+ }
+ // Remainder is the root
+ this.segments.unshift(remaining);
+ }
+ }
+ // Array
+ else {
+ // Must not be empty
+ assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
+ // Each segment
+ for (let i = 0; i < itemPath.length; i++) {
+ let segment = itemPath[i];
+ // Must not be empty
+ assert(segment, `Parameter 'itemPath' must not contain any empty segments`);
+ // Normalize slashes
+ segment = pathHelper.normalizeSeparators(itemPath[i]);
+ // Root segment
+ if (i === 0 && pathHelper.hasRoot(segment)) {
+ segment = pathHelper.safeTrimTrailingSeparator(segment);
+ assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
+ this.segments.push(segment);
+ }
+ // All other segments
+ else {
+ // Must not contain slash
+ assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
+ this.segments.push(segment);
+ }
+ }
+ }
+ }
+ /**
+ * Converts the path to it's string representation
+ */
+ toString() {
+ // First segment
+ let result = this.segments[0];
+ // All others
+ let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));
+ for (let i = 1; i < this.segments.length; i++) {
+ if (skipSlash) {
+ skipSlash = false;
+ }
+ else {
+ result += path.sep;
+ }
+ result += this.segments[i];
+ }
+ return result;
+ }
+}
+exports.Path = Path;
+//# sourceMappingURL=internal-path.js.map
+
+/***/ }),
+/* 384 */
+/***/ (function(module) {
+
+"use strict";
+
+
+module.exports.isObjectProto = isObjectProto
+function isObjectProto (obj) {
+ return obj === Object.prototype
+}
+
+const _null = {}
+const _undefined = {}
+const Bool = Boolean
+const Num = Number
+const Str = String
+const boolCache = {
+ true: new Bool(true),
+ false: new Bool(false)
+}
+const numCache = {}
+const strCache = {}
+
+/*
+ * Returns a useful dispatch object for value using a process similar to
+ * the ToObject operation specified in http://es5.github.com/#x9.9
+ */
+module.exports.dispatchableObject = dispatchableObject
+function dispatchableObject (value) {
+ // To shut up jshint, which doesn't let me turn off this warning.
+ const Obj = Object
+ if (value === null) { return _null }
+ if (value === undefined) { return _undefined }
+ switch (typeof value) {
+ case 'object': return value
+ case 'boolean': return boolCache[value]
+ case 'number': return numCache[value] || (numCache[value] = new Num(value))
+ case 'string': return strCache[value] || (strCache[value] = new Str(value))
+ default: return new Obj(value)
+ }
+}
+
+
+/***/ }),
+/* 385 */,
+/* 386 */,
+/* 387 */
+/***/ (function(module) {
+
+module.exports = {"name":"node-gyp","description":"Node.js native addon build tool","license":"MIT","keywords":["native","addon","module","c","c++","bindings","gyp"],"version":"5.1.1","installVersion":9,"author":"Nathan Rajlich (http://tootallnate.net)","repository":{"type":"git","url":"git://github.com/nodejs/node-gyp.git"},"preferGlobal":true,"bin":"./bin/node-gyp.js","main":"./lib/node-gyp.js","dependencies":{"env-paths":"^2.2.0","glob":"^7.1.4","graceful-fs":"^4.2.2","mkdirp":"^0.5.1","nopt":"^4.0.1","npmlog":"^4.1.2","request":"^2.88.0","rimraf":"^2.6.3","semver":"^5.7.1","tar":"^4.4.12","which":"^1.3.1"},"engines":{"node":">= 6.0.0"},"devDependencies":{"bindings":"^1.5.0","nan":"^2.14.0","require-inject":"^1.4.4","standard":"^14.3.1","tap":"~12.7.0"},"scripts":{"lint":"standard */*.js test/**/*.js","test":"npm run lint && tap --timeout=120 test/test-*"}};
+
+/***/ }),
+/* 388 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+/**
+ * Detect Electron renderer process, which is node, but we should
+ * treat as a browser.
+ */
+
+if (typeof process === 'undefined' || process.type === 'renderer') {
+ module.exports = __webpack_require__(592);
+} else {
+ module.exports = __webpack_require__(161);
+}
+
+
+/***/ }),
+/* 389 */,
+/* 390 */,
+/* 391 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var current = (process.versions && process.versions.node && process.versions.node.split('.')) || [];
+
+function specifierIncluded(specifier) {
+ var parts = specifier.split(' ');
+ var op = parts.length > 1 ? parts[0] : '=';
+ var versionParts = (parts.length > 1 ? parts[1] : parts[0]).split('.');
+
+ for (var i = 0; i < 3; ++i) {
+ var cur = Number(current[i] || 0);
+ var ver = Number(versionParts[i] || 0);
+ if (cur === ver) {
+ continue; // eslint-disable-line no-restricted-syntax, no-continue
+ }
+ if (op === '<') {
+ return cur < ver;
+ } else if (op === '>=') {
+ return cur >= ver;
+ } else {
+ return false;
+ }
+ }
+ return op === '>=';
+}
+
+function matchesRange(range) {
+ var specifiers = range.split(/ ?&& ?/);
+ if (specifiers.length === 0) { return false; }
+ for (var i = 0; i < specifiers.length; ++i) {
+ if (!specifierIncluded(specifiers[i])) { return false; }
+ }
+ return true;
+}
+
+function versionIncluded(specifierValue) {
+ if (typeof specifierValue === 'boolean') { return specifierValue; }
+ if (specifierValue && typeof specifierValue === 'object') {
+ for (var i = 0; i < specifierValue.length; ++i) {
+ if (matchesRange(specifierValue[i])) { return true; }
+ }
+ return false;
+ }
+ return matchesRange(specifierValue);
+}
+
+var data = __webpack_require__(656);
+
+var core = {};
+for (var mod in data) { // eslint-disable-line no-restricted-syntax
+ if (Object.prototype.hasOwnProperty.call(data, mod)) {
+ core[mod] = versionIncluded(data[mod]);
+ }
+}
+module.exports = core;
+
+
+/***/ }),
+/* 392 */,
+/* 393 */
+/***/ (function(__unusedmodule, exports, __webpack_require__) {
+
+"use strict";
+/*!
+ * Copyright (c) 2015, Salesforce.com, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * 3. Neither the name of Salesforce.com nor the names of its contributors may
+ * be used to endorse or promote products derived from this software without
+ * specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+const punycode = __webpack_require__(213);
+const urlParse = __webpack_require__(835).parse;
+const util = __webpack_require__(669);
+const pubsuffix = __webpack_require__(562);
+const Store = __webpack_require__(338).Store;
+const MemoryCookieStore = __webpack_require__(332).MemoryCookieStore;
+const pathMatch = __webpack_require__(348).pathMatch;
+const VERSION = __webpack_require__(460);
+const { fromCallback } = __webpack_require__(147);
+
+// From RFC6265 S4.1.1
+// note that it excludes \x3B ";"
+const COOKIE_OCTETS = /^[\x21\x23-\x2B\x2D-\x3A\x3C-\x5B\x5D-\x7E]+$/;
+
+const CONTROL_CHARS = /[\x00-\x1F]/;
+
+// From Chromium // '\r', '\n' and '\0' should be treated as a terminator in
+// the "relaxed" mode, see:
+// https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60
+const TERMINATORS = ["\n", "\r", "\0"];
+
+// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or ";"'
+// Note ';' is \x3B
+const PATH_VALUE = /[\x20-\x3A\x3C-\x7E]+/;
+
+// date-time parsing constants (RFC6265 S5.1.1)
+
+const DATE_DELIM = /[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]/;
+
+const MONTH_TO_NUM = {
+ jan: 0,
+ feb: 1,
+ mar: 2,
+ apr: 3,
+ may: 4,
+ jun: 5,
+ jul: 6,
+ aug: 7,
+ sep: 8,
+ oct: 9,
+ nov: 10,
+ dec: 11
+};
+
+const MAX_TIME = 2147483647000; // 31-bit max
+const MIN_TIME = 0; // 31-bit min
+const SAME_SITE_CONTEXT_VAL_ERR =
+ 'Invalid sameSiteContext option for getCookies(); expected one of "strict", "lax", or "none"';
+
+function checkSameSiteContext(value) {
+ const context = String(value).toLowerCase();
+ if (context === "none" || context === "lax" || context === "strict") {
+ return context;
+ } else {
+ return null;
+ }
+}
+
+const PrefixSecurityEnum = Object.freeze({
+ SILENT: "silent",
+ STRICT: "strict",
+ DISABLED: "unsafe-disabled"
+});
+
+// Dumped from ip-regex@4.0.0, with the following changes:
+// * all capturing groups converted to non-capturing -- "(?:)"
+// * support for IPv6 Scoped Literal ("%eth1") removed
+// * lowercase hexadecimal only
+var IP_REGEX_LOWERCASE =/(?:^(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}$)|(?:^(?:(?:[a-f\d]{1,4}:){7}(?:[a-f\d]{1,4}|:)|(?:[a-f\d]{1,4}:){6}(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|:[a-f\d]{1,4}|:)|(?:[a-f\d]{1,4}:){5}(?::(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,2}|:)|(?:[a-f\d]{1,4}:){4}(?:(?::[a-f\d]{1,4}){0,1}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,3}|:)|(?:[a-f\d]{1,4}:){3}(?:(?::[a-f\d]{1,4}){0,2}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,4}|:)|(?:[a-f\d]{1,4}:){2}(?:(?::[a-f\d]{1,4}){0,3}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,5}|:)|(?:[a-f\d]{1,4}:){1}(?:(?::[a-f\d]{1,4}){0,4}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,6}|:)|(?::(?:(?::[a-f\d]{1,4}){0,5}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,7}|:)))$)/;
+
+/*
+ * Parses a Natural number (i.e., non-negative integer) with either the
+ * *DIGIT ( non-digit *OCTET )
+ * or
+ * *DIGIT
+ * grammar (RFC6265 S5.1.1).
+ *
+ * The "trailingOK" boolean controls if the grammar accepts a
+ * "( non-digit *OCTET )" trailer.
+ */
+function parseDigits(token, minDigits, maxDigits, trailingOK) {
+ let count = 0;
+ while (count < token.length) {
+ const c = token.charCodeAt(count);
+ // "non-digit = %x00-2F / %x3A-FF"
+ if (c <= 0x2f || c >= 0x3a) {
+ break;
+ }
+ count++;
+ }
+
+ // constrain to a minimum and maximum number of digits.
+ if (count < minDigits || count > maxDigits) {
+ return null;
+ }
+
+ if (!trailingOK && count != token.length) {
+ return null;
+ }
+
+ return parseInt(token.substr(0, count), 10);
+}
+
+function parseTime(token) {
+ const parts = token.split(":");
+ const result = [0, 0, 0];
+
+ /* RF6256 S5.1.1:
+ * time = hms-time ( non-digit *OCTET )
+ * hms-time = time-field ":" time-field ":" time-field
+ * time-field = 1*2DIGIT
+ */
+
+ if (parts.length !== 3) {
+ return null;
+ }
+
+ for (let i = 0; i < 3; i++) {
+ // "time-field" must be strictly "1*2DIGIT", HOWEVER, "hms-time" can be
+ // followed by "( non-digit *OCTET )" so therefore the last time-field can
+ // have a trailer
+ const trailingOK = i == 2;
+ const num = parseDigits(parts[i], 1, 2, trailingOK);
+ if (num === null) {
+ return null;
+ }
+ result[i] = num;
+ }
+
+ return result;
+}
+
+function parseMonth(token) {
+ token = String(token)
+ .substr(0, 3)
+ .toLowerCase();
+ const num = MONTH_TO_NUM[token];
+ return num >= 0 ? num : null;
+}
+
+/*
+ * RFC6265 S5.1.1 date parser (see RFC for full grammar)
+ */
+function parseDate(str) {
+ if (!str) {
+ return;
+ }
+
+ /* RFC6265 S5.1.1:
+ * 2. Process each date-token sequentially in the order the date-tokens
+ * appear in the cookie-date
+ */
+ const tokens = str.split(DATE_DELIM);
+ if (!tokens) {
+ return;
+ }
+
+ let hour = null;
+ let minute = null;
+ let second = null;
+ let dayOfMonth = null;
+ let month = null;
+ let year = null;
+
+ for (let i = 0; i < tokens.length; i++) {
+ const token = tokens[i].trim();
+ if (!token.length) {
+ continue;
+ }
+
+ let result;
+
+ /* 2.1. If the found-time flag is not set and the token matches the time
+ * production, set the found-time flag and set the hour- value,
+ * minute-value, and second-value to the numbers denoted by the digits in
+ * the date-token, respectively. Skip the remaining sub-steps and continue
+ * to the next date-token.
+ */
+ if (second === null) {
+ result = parseTime(token);
+ if (result) {
+ hour = result[0];
+ minute = result[1];
+ second = result[2];
+ continue;
+ }
+ }
+
+ /* 2.2. If the found-day-of-month flag is not set and the date-token matches
+ * the day-of-month production, set the found-day-of- month flag and set
+ * the day-of-month-value to the number denoted by the date-token. Skip
+ * the remaining sub-steps and continue to the next date-token.
+ */
+ if (dayOfMonth === null) {
+ // "day-of-month = 1*2DIGIT ( non-digit *OCTET )"
+ result = parseDigits(token, 1, 2, true);
+ if (result !== null) {
+ dayOfMonth = result;
+ continue;
+ }
+ }
+
+ /* 2.3. If the found-month flag is not set and the date-token matches the
+ * month production, set the found-month flag and set the month-value to
+ * the month denoted by the date-token. Skip the remaining sub-steps and
+ * continue to the next date-token.
+ */
+ if (month === null) {
+ result = parseMonth(token);
+ if (result !== null) {
+ month = result;
+ continue;
+ }
+ }
+
+ /* 2.4. If the found-year flag is not set and the date-token matches the
+ * year production, set the found-year flag and set the year-value to the
+ * number denoted by the date-token. Skip the remaining sub-steps and
+ * continue to the next date-token.
+ */
+ if (year === null) {
+ // "year = 2*4DIGIT ( non-digit *OCTET )"
+ result = parseDigits(token, 2, 4, true);
+ if (result !== null) {
+ year = result;
+ /* From S5.1.1:
+ * 3. If the year-value is greater than or equal to 70 and less
+ * than or equal to 99, increment the year-value by 1900.
+ * 4. If the year-value is greater than or equal to 0 and less
+ * than or equal to 69, increment the year-value by 2000.
+ */
+ if (year >= 70 && year <= 99) {
+ year += 1900;
+ } else if (year >= 0 && year <= 69) {
+ year += 2000;
+ }
+ }
+ }
+ }
+
+ /* RFC 6265 S5.1.1
+ * "5. Abort these steps and fail to parse the cookie-date if:
+ * * at least one of the found-day-of-month, found-month, found-
+ * year, or found-time flags is not set,
+ * * the day-of-month-value is less than 1 or greater than 31,
+ * * the year-value is less than 1601,
+ * * the hour-value is greater than 23,
+ * * the minute-value is greater than 59, or
+ * * the second-value is greater than 59.
+ * (Note that leap seconds cannot be represented in this syntax.)"
+ *
+ * So, in order as above:
+ */
+ if (
+ dayOfMonth === null ||
+ month === null ||
+ year === null ||
+ second === null ||
+ dayOfMonth < 1 ||
+ dayOfMonth > 31 ||
+ year < 1601 ||
+ hour > 23 ||
+ minute > 59 ||
+ second > 59
+ ) {
+ return;
+ }
+
+ return new Date(Date.UTC(year, month, dayOfMonth, hour, minute, second));
+}
+
+function formatDate(date) {
+ return date.toUTCString();
+}
+
+// S5.1.2 Canonicalized Host Names
+function canonicalDomain(str) {
+ if (str == null) {
+ return null;
+ }
+ str = str.trim().replace(/^\./, ""); // S4.1.2.3 & S5.2.3: ignore leading .
+
+ // convert to IDN if any non-ASCII characters
+ if (punycode && /[^\u0001-\u007f]/.test(str)) {
+ str = punycode.toASCII(str);
+ }
+
+ return str.toLowerCase();
+}
+
+// S5.1.3 Domain Matching
+function domainMatch(str, domStr, canonicalize) {
+ if (str == null || domStr == null) {
+ return null;
+ }
+ if (canonicalize !== false) {
+ str = canonicalDomain(str);
+ domStr = canonicalDomain(domStr);
+ }
+
+ /*
+ * S5.1.3:
+ * "A string domain-matches a given domain string if at least one of the
+ * following conditions hold:"
+ *
+ * " o The domain string and the string are identical. (Note that both the
+ * domain string and the string will have been canonicalized to lower case at
+ * this point)"
+ */
+ if (str == domStr) {
+ return true;
+ }
+
+ /* " o All of the following [three] conditions hold:" */
+
+ /* "* The domain string is a suffix of the string" */
+ const idx = str.indexOf(domStr);
+ if (idx <= 0) {
+ return false; // it's a non-match (-1) or prefix (0)
+ }
+
+ // next, check it's a proper suffix
+ // e.g., "a.b.c".indexOf("b.c") === 2
+ // 5 === 3+2
+ if (str.length !== domStr.length + idx) {
+ return false; // it's not a suffix
+ }
+
+ /* " * The last character of the string that is not included in the
+ * domain string is a %x2E (".") character." */
+ if (str.substr(idx-1,1) !== '.') {
+ return false; // doesn't align on "."
+ }
+
+ /* " * The string is a host name (i.e., not an IP address)." */
+ if (IP_REGEX_LOWERCASE.test(str)) {
+ return false; // it's an IP address
+ }
+
+ return true;
+}
+
+// RFC6265 S5.1.4 Paths and Path-Match
+
+/*
+ * "The user agent MUST use an algorithm equivalent to the following algorithm
+ * to compute the default-path of a cookie:"
+ *
+ * Assumption: the path (and not query part or absolute uri) is passed in.
+ */
+function defaultPath(path) {
+ // "2. If the uri-path is empty or if the first character of the uri-path is not
+ // a %x2F ("/") character, output %x2F ("/") and skip the remaining steps.
+ if (!path || path.substr(0, 1) !== "/") {
+ return "/";
+ }
+
+ // "3. If the uri-path contains no more than one %x2F ("/") character, output
+ // %x2F ("/") and skip the remaining step."
+ if (path === "/") {
+ return path;
+ }
+
+ const rightSlash = path.lastIndexOf("/");
+ if (rightSlash === 0) {
+ return "/";
+ }
+
+ // "4. Output the characters of the uri-path from the first character up to,
+ // but not including, the right-most %x2F ("/")."
+ return path.slice(0, rightSlash);
+}
+
+function trimTerminator(str) {
+ for (let t = 0; t < TERMINATORS.length; t++) {
+ const terminatorIdx = str.indexOf(TERMINATORS[t]);
+ if (terminatorIdx !== -1) {
+ str = str.substr(0, terminatorIdx);
+ }
+ }
+
+ return str;
+}
+
+function parseCookiePair(cookiePair, looseMode) {
+ cookiePair = trimTerminator(cookiePair);
+
+ let firstEq = cookiePair.indexOf("=");
+ if (looseMode) {
+ if (firstEq === 0) {
+ // '=' is immediately at start
+ cookiePair = cookiePair.substr(1);
+ firstEq = cookiePair.indexOf("="); // might still need to split on '='
+ }
+ } else {
+ // non-loose mode
+ if (firstEq <= 0) {
+ // no '=' or is at start
+ return; // needs to have non-empty "cookie-name"
+ }
+ }
+
+ let cookieName, cookieValue;
+ if (firstEq <= 0) {
+ cookieName = "";
+ cookieValue = cookiePair.trim();
+ } else {
+ cookieName = cookiePair.substr(0, firstEq).trim();
+ cookieValue = cookiePair.substr(firstEq + 1).trim();
+ }
+
+ if (CONTROL_CHARS.test(cookieName) || CONTROL_CHARS.test(cookieValue)) {
+ return;
+ }
+
+ const c = new Cookie();
+ c.key = cookieName;
+ c.value = cookieValue;
+ return c;
+}
+
+function parse(str, options) {
+ if (!options || typeof options !== "object") {
+ options = {};
+ }
+ str = str.trim();
+
+ // We use a regex to parse the "name-value-pair" part of S5.2
+ const firstSemi = str.indexOf(";"); // S5.2 step 1
+ const cookiePair = firstSemi === -1 ? str : str.substr(0, firstSemi);
+ const c = parseCookiePair(cookiePair, !!options.loose);
+ if (!c) {
+ return;
+ }
+
+ if (firstSemi === -1) {
+ return c;
+ }
+
+ // S5.2.3 "unparsed-attributes consist of the remainder of the set-cookie-string
+ // (including the %x3B (";") in question)." plus later on in the same section
+ // "discard the first ";" and trim".
+ const unparsed = str.slice(firstSemi + 1).trim();
+
+ // "If the unparsed-attributes string is empty, skip the rest of these
+ // steps."
+ if (unparsed.length === 0) {
+ return c;
+ }
+
+ /*
+ * S5.2 says that when looping over the items "[p]rocess the attribute-name
+ * and attribute-value according to the requirements in the following
+ * subsections" for every item. Plus, for many of the individual attributes
+ * in S5.3 it says to use the "attribute-value of the last attribute in the
+ * cookie-attribute-list". Therefore, in this implementation, we overwrite
+ * the previous value.
+ */
+ const cookie_avs = unparsed.split(";");
+ while (cookie_avs.length) {
+ const av = cookie_avs.shift().trim();
+ if (av.length === 0) {
+ // happens if ";;" appears
+ continue;
+ }
+ const av_sep = av.indexOf("=");
+ let av_key, av_value;
+
+ if (av_sep === -1) {
+ av_key = av;
+ av_value = null;
+ } else {
+ av_key = av.substr(0, av_sep);
+ av_value = av.substr(av_sep + 1);
+ }
+
+ av_key = av_key.trim().toLowerCase();
+
+ if (av_value) {
+ av_value = av_value.trim();
+ }
+
+ switch (av_key) {
+ case "expires": // S5.2.1
+ if (av_value) {
+ const exp = parseDate(av_value);
+ // "If the attribute-value failed to parse as a cookie date, ignore the
+ // cookie-av."
+ if (exp) {
+ // over and underflow not realistically a concern: V8's getTime() seems to
+ // store something larger than a 32-bit time_t (even with 32-bit node)
+ c.expires = exp;
+ }
+ }
+ break;
+
+ case "max-age": // S5.2.2
+ if (av_value) {
+ // "If the first character of the attribute-value is not a DIGIT or a "-"
+ // character ...[or]... If the remainder of attribute-value contains a
+ // non-DIGIT character, ignore the cookie-av."
+ if (/^-?[0-9]+$/.test(av_value)) {
+ const delta = parseInt(av_value, 10);
+ // "If delta-seconds is less than or equal to zero (0), let expiry-time
+ // be the earliest representable date and time."
+ c.setMaxAge(delta);
+ }
+ }
+ break;
+
+ case "domain": // S5.2.3
+ // "If the attribute-value is empty, the behavior is undefined. However,
+ // the user agent SHOULD ignore the cookie-av entirely."
+ if (av_value) {
+ // S5.2.3 "Let cookie-domain be the attribute-value without the leading %x2E
+ // (".") character."
+ const domain = av_value.trim().replace(/^\./, "");
+ if (domain) {
+ // "Convert the cookie-domain to lower case."
+ c.domain = domain.toLowerCase();
+ }
+ }
+ break;
+
+ case "path": // S5.2.4
+ /*
+ * "If the attribute-value is empty or if the first character of the
+ * attribute-value is not %x2F ("/"):
+ * Let cookie-path be the default-path.
+ * Otherwise:
+ * Let cookie-path be the attribute-value."
+ *
+ * We'll represent the default-path as null since it depends on the
+ * context of the parsing.
+ */
+ c.path = av_value && av_value[0] === "/" ? av_value : null;
+ break;
+
+ case "secure": // S5.2.5
+ /*
+ * "If the attribute-name case-insensitively matches the string "Secure",
+ * the user agent MUST append an attribute to the cookie-attribute-list
+ * with an attribute-name of Secure and an empty attribute-value."
+ */
+ c.secure = true;
+ break;
+
+ case "httponly": // S5.2.6 -- effectively the same as 'secure'
+ c.httpOnly = true;
+ break;
+
+ case "samesite": // RFC6265bis-02 S5.3.7
+ const enforcement = av_value ? av_value.toLowerCase() : "";
+ switch (enforcement) {
+ case "strict":
+ c.sameSite = "strict";
+ break;
+ case "lax":
+ c.sameSite = "lax";
+ break;
+ default:
+ // RFC6265bis-02 S5.3.7 step 1:
+ // "If cookie-av's attribute-value is not a case-insensitive match
+ // for "Strict" or "Lax", ignore the "cookie-av"."
+ // This effectively sets it to 'none' from the prototype.
+ break;
+ }
+ break;
+
+ default:
+ c.extensions = c.extensions || [];
+ c.extensions.push(av);
+ break;
+ }
+ }
+
+ return c;
+}
+
+/**
+ * If the cookie-name begins with a case-sensitive match for the
+ * string "__Secure-", abort these steps and ignore the cookie
+ * entirely unless the cookie's secure-only-flag is true.
+ * @param cookie
+ * @returns boolean
+ */
+function isSecurePrefixConditionMet(cookie) {
+ return !cookie.key.startsWith("__Secure-") || cookie.secure;
+}
+
+/**
+ * If the cookie-name begins with a case-sensitive match for the
+ * string "__Host-", abort these steps and ignore the cookie
+ * entirely unless the cookie meets all the following criteria:
+ * 1. The cookie's secure-only-flag is true.
+ * 2. The cookie's host-only-flag is true.
+ * 3. The cookie-attribute-list contains an attribute with an
+ * attribute-name of "Path", and the cookie's path is "/".
+ * @param cookie
+ * @returns boolean
+ */
+function isHostPrefixConditionMet(cookie) {
+ return (
+ !cookie.key.startsWith("__Host-") ||
+ (cookie.secure &&
+ cookie.hostOnly &&
+ cookie.path != null &&
+ cookie.path === "/")
+ );
+}
+
+// avoid the V8 deoptimization monster!
+function jsonParse(str) {
+ let obj;
+ try {
+ obj = JSON.parse(str);
+ } catch (e) {
+ return e;
+ }
+ return obj;
+}
+
+function fromJSON(str) {
+ if (!str) {
+ return null;
+ }
+
+ let obj;
+ if (typeof str === "string") {
+ obj = jsonParse(str);
+ if (obj instanceof Error) {
+ return null;
+ }
+ } else {
+ // assume it's an Object
+ obj = str;
+ }
+
+ const c = new Cookie();
+ for (let i = 0; i < Cookie.serializableProperties.length; i++) {
+ const prop = Cookie.serializableProperties[i];
+ if (obj[prop] === undefined || obj[prop] === cookieDefaults[prop]) {
+ continue; // leave as prototype default
+ }
+
+ if (prop === "expires" || prop === "creation" || prop === "lastAccessed") {
+ if (obj[prop] === null) {
+ c[prop] = null;
+ } else {
+ c[prop] = obj[prop] == "Infinity" ? "Infinity" : new Date(obj[prop]);
+ }
+ } else {
+ c[prop] = obj[prop];
+ }
+ }
+
+ return c;
+}
+
+/* Section 5.4 part 2:
+ * "* Cookies with longer paths are listed before cookies with
+ * shorter paths.
+ *
+ * * Among cookies that have equal-length path fields, cookies with
+ * earlier creation-times are listed before cookies with later
+ * creation-times."
+ */
+
+function cookieCompare(a, b) {
+ let cmp = 0;
+
+ // descending for length: b CMP a
+ const aPathLen = a.path ? a.path.length : 0;
+ const bPathLen = b.path ? b.path.length : 0;
+ cmp = bPathLen - aPathLen;
+ if (cmp !== 0) {
+ return cmp;
+ }
+
+ // ascending for time: a CMP b
+ const aTime = a.creation ? a.creation.getTime() : MAX_TIME;
+ const bTime = b.creation ? b.creation.getTime() : MAX_TIME;
+ cmp = aTime - bTime;
+ if (cmp !== 0) {
+ return cmp;
+ }
+
+ // break ties for the same millisecond (precision of JavaScript's clock)
+ cmp = a.creationIndex - b.creationIndex;
+
+ return cmp;
+}
+
+// Gives the permutation of all possible pathMatch()es of a given path. The
+// array is in longest-to-shortest order. Handy for indexing.
+function permutePath(path) {
+ if (path === "/") {
+ return ["/"];
+ }
+ const permutations = [path];
+ while (path.length > 1) {
+ const lindex = path.lastIndexOf("/");
+ if (lindex === 0) {
+ break;
+ }
+ path = path.substr(0, lindex);
+ permutations.push(path);
+ }
+ permutations.push("/");
+ return permutations;
+}
+
+function getCookieContext(url) {
+ if (url instanceof Object) {
+ return url;
+ }
+ // NOTE: decodeURI will throw on malformed URIs (see GH-32).
+ // Therefore, we will just skip decoding for such URIs.
+ try {
+ url = decodeURI(url);
+ } catch (err) {
+ // Silently swallow error
+ }
+
+ return urlParse(url);
+}
+
+const cookieDefaults = {
+ // the order in which the RFC has them:
+ key: "",
+ value: "",
+ expires: "Infinity",
+ maxAge: null,
+ domain: null,
+ path: null,
+ secure: false,
+ httpOnly: false,
+ extensions: null,
+ // set by the CookieJar:
+ hostOnly: null,
+ pathIsDefault: null,
+ creation: null,
+ lastAccessed: null,
+ sameSite: "none"
+};
+
+class Cookie {
+ constructor(options = {}) {
+ if (util.inspect.custom) {
+ this[util.inspect.custom] = this.inspect;
+ }
+
+ Object.assign(this, cookieDefaults, options);
+ this.creation = this.creation || new Date();
+
+ // used to break creation ties in cookieCompare():
+ Object.defineProperty(this, "creationIndex", {
+ configurable: false,
+ enumerable: false, // important for assert.deepEqual checks
+ writable: true,
+ value: ++Cookie.cookiesCreated
+ });
+ }
+
+ inspect() {
+ const now = Date.now();
+ const hostOnly = this.hostOnly != null ? this.hostOnly : "?";
+ const createAge = this.creation
+ ? `${now - this.creation.getTime()}ms`
+ : "?";
+ const accessAge = this.lastAccessed
+ ? `${now - this.lastAccessed.getTime()}ms`
+ : "?";
+ return `Cookie="${this.toString()}; hostOnly=${hostOnly}; aAge=${accessAge}; cAge=${createAge}"`;
+ }
+
+ toJSON() {
+ const obj = {};
+
+ for (const prop of Cookie.serializableProperties) {
+ if (this[prop] === cookieDefaults[prop]) {
+ continue; // leave as prototype default
+ }
+
+ if (
+ prop === "expires" ||
+ prop === "creation" ||
+ prop === "lastAccessed"
+ ) {
+ if (this[prop] === null) {
+ obj[prop] = null;
+ } else {
+ obj[prop] =
+ this[prop] == "Infinity" // intentionally not ===
+ ? "Infinity"
+ : this[prop].toISOString();
+ }
+ } else if (prop === "maxAge") {
+ if (this[prop] !== null) {
+ // again, intentionally not ===
+ obj[prop] =
+ this[prop] == Infinity || this[prop] == -Infinity
+ ? this[prop].toString()
+ : this[prop];
+ }
+ } else {
+ if (this[prop] !== cookieDefaults[prop]) {
+ obj[prop] = this[prop];
+ }
+ }
+ }
+
+ return obj;
+ }
+
+ clone() {
+ return fromJSON(this.toJSON());
+ }
+
+ validate() {
+ if (!COOKIE_OCTETS.test(this.value)) {
+ return false;
+ }
+ if (
+ this.expires != Infinity &&
+ !(this.expires instanceof Date) &&
+ !parseDate(this.expires)
+ ) {
+ return false;
+ }
+ if (this.maxAge != null && this.maxAge <= 0) {
+ return false; // "Max-Age=" non-zero-digit *DIGIT
+ }
+ if (this.path != null && !PATH_VALUE.test(this.path)) {
+ return false;
+ }
+
+ const cdomain = this.cdomain();
+ if (cdomain) {
+ if (cdomain.match(/\.$/)) {
+ return false; // S4.1.2.3 suggests that this is bad. domainMatch() tests confirm this
+ }
+ const suffix = pubsuffix.getPublicSuffix(cdomain);
+ if (suffix == null) {
+ // it's a public suffix
+ return false;
+ }
+ }
+ return true;
+ }
+
+ setExpires(exp) {
+ if (exp instanceof Date) {
+ this.expires = exp;
+ } else {
+ this.expires = parseDate(exp) || "Infinity";
+ }
+ }
+
+ setMaxAge(age) {
+ if (age === Infinity || age === -Infinity) {
+ this.maxAge = age.toString(); // so JSON.stringify() works
+ } else {
+ this.maxAge = age;
+ }
+ }
+
+ cookieString() {
+ let val = this.value;
+ if (val == null) {
+ val = "";
+ }
+ if (this.key === "") {
+ return val;
+ }
+ return `${this.key}=${val}`;
+ }
+
+ // gives Set-Cookie header format
+ toString() {
+ let str = this.cookieString();
+
+ if (this.expires != Infinity) {
+ if (this.expires instanceof Date) {
+ str += `; Expires=${formatDate(this.expires)}`;
+ } else {
+ str += `; Expires=${this.expires}`;
+ }
+ }
+
+ if (this.maxAge != null && this.maxAge != Infinity) {
+ str += `; Max-Age=${this.maxAge}`;
+ }
+
+ if (this.domain && !this.hostOnly) {
+ str += `; Domain=${this.domain}`;
+ }
+ if (this.path) {
+ str += `; Path=${this.path}`;
+ }
+
+ if (this.secure) {
+ str += "; Secure";
+ }
+ if (this.httpOnly) {
+ str += "; HttpOnly";
+ }
+ if (this.sameSite && this.sameSite !== "none") {
+ const ssCanon = Cookie.sameSiteCanonical[this.sameSite.toLowerCase()];
+ str += `; SameSite=${ssCanon ? ssCanon : this.sameSite}`;
+ }
+ if (this.extensions) {
+ this.extensions.forEach(ext => {
+ str += `; ${ext}`;
+ });
+ }
+
+ return str;
+ }
+
+ // TTL() partially replaces the "expiry-time" parts of S5.3 step 3 (setCookie()
+ // elsewhere)
+ // S5.3 says to give the "latest representable date" for which we use Infinity
+ // For "expired" we use 0
+ TTL(now) {
+ /* RFC6265 S4.1.2.2 If a cookie has both the Max-Age and the Expires
+ * attribute, the Max-Age attribute has precedence and controls the
+ * expiration date of the cookie.
+ * (Concurs with S5.3 step 3)
+ */
+ if (this.maxAge != null) {
+ return this.maxAge <= 0 ? 0 : this.maxAge * 1000;
+ }
+
+ let expires = this.expires;
+ if (expires != Infinity) {
+ if (!(expires instanceof Date)) {
+ expires = parseDate(expires) || Infinity;
+ }
+
+ if (expires == Infinity) {
+ return Infinity;
+ }
+
+ return expires.getTime() - (now || Date.now());
+ }
+
+ return Infinity;
+ }
+
+ // expiryTime() replaces the "expiry-time" parts of S5.3 step 3 (setCookie()
+ // elsewhere)
+ expiryTime(now) {
+ if (this.maxAge != null) {
+ const relativeTo = now || this.creation || new Date();
+ const age = this.maxAge <= 0 ? -Infinity : this.maxAge * 1000;
+ return relativeTo.getTime() + age;
+ }
+
+ if (this.expires == Infinity) {
+ return Infinity;
+ }
+ return this.expires.getTime();
+ }
+
+ // expiryDate() replaces the "expiry-time" parts of S5.3 step 3 (setCookie()
+ // elsewhere), except it returns a Date
+ expiryDate(now) {
+ const millisec = this.expiryTime(now);
+ if (millisec == Infinity) {
+ return new Date(MAX_TIME);
+ } else if (millisec == -Infinity) {
+ return new Date(MIN_TIME);
+ } else {
+ return new Date(millisec);
+ }
+ }
+
+ // This replaces the "persistent-flag" parts of S5.3 step 3
+ isPersistent() {
+ return this.maxAge != null || this.expires != Infinity;
+ }
+
+ // Mostly S5.1.2 and S5.2.3:
+ canonicalizedDomain() {
+ if (this.domain == null) {
+ return null;
+ }
+ return canonicalDomain(this.domain);
+ }
+
+ cdomain() {
+ return this.canonicalizedDomain();
+ }
+}
+
+Cookie.cookiesCreated = 0;
+Cookie.parse = parse;
+Cookie.fromJSON = fromJSON;
+Cookie.serializableProperties = Object.keys(cookieDefaults);
+Cookie.sameSiteLevel = {
+ strict: 3,
+ lax: 2,
+ none: 1
+};
+
+Cookie.sameSiteCanonical = {
+ strict: "Strict",
+ lax: "Lax"
+};
+
+function getNormalizedPrefixSecurity(prefixSecurity) {
+ if (prefixSecurity != null) {
+ const normalizedPrefixSecurity = prefixSecurity.toLowerCase();
+ /* The three supported options */
+ switch (normalizedPrefixSecurity) {
+ case PrefixSecurityEnum.STRICT:
+ case PrefixSecurityEnum.SILENT:
+ case PrefixSecurityEnum.DISABLED:
+ return normalizedPrefixSecurity;
+ }
+ }
+ /* Default is SILENT */
+ return PrefixSecurityEnum.SILENT;
+}
+
+class CookieJar {
+ constructor(store, options = { rejectPublicSuffixes: true }) {
+ if (typeof options === "boolean") {
+ options = { rejectPublicSuffixes: options };
+ }
+ this.rejectPublicSuffixes = options.rejectPublicSuffixes;
+ this.enableLooseMode = !!options.looseMode;
+ this.allowSpecialUseDomain = !!options.allowSpecialUseDomain;
+ this.store = store || new MemoryCookieStore();
+ this.prefixSecurity = getNormalizedPrefixSecurity(options.prefixSecurity);
+ this._cloneSync = syncWrap("clone");
+ this._importCookiesSync = syncWrap("_importCookies");
+ this.getCookiesSync = syncWrap("getCookies");
+ this.getCookieStringSync = syncWrap("getCookieString");
+ this.getSetCookieStringsSync = syncWrap("getSetCookieStrings");
+ this.removeAllCookiesSync = syncWrap("removeAllCookies");
+ this.setCookieSync = syncWrap("setCookie");
+ this.serializeSync = syncWrap("serialize");
+ }
+
+ setCookie(cookie, url, options, cb) {
+ let err;
+ const context = getCookieContext(url);
+ if (typeof options === "function") {
+ cb = options;
+ options = {};
+ }
+
+ const host = canonicalDomain(context.hostname);
+ const loose = options.loose || this.enableLooseMode;
+
+ let sameSiteContext = null;
+ if (options.sameSiteContext) {
+ sameSiteContext = checkSameSiteContext(options.sameSiteContext);
+ if (!sameSiteContext) {
+ return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR));
+ }
+ }
+
+ // S5.3 step 1
+ if (typeof cookie === "string" || cookie instanceof String) {
+ cookie = Cookie.parse(cookie, { loose: loose });
+ if (!cookie) {
+ err = new Error("Cookie failed to parse");
+ return cb(options.ignoreError ? null : err);
+ }
+ } else if (!(cookie instanceof Cookie)) {
+ // If you're seeing this error, and are passing in a Cookie object,
+ // it *might* be a Cookie object from another loaded version of tough-cookie.
+ err = new Error(
+ "First argument to setCookie must be a Cookie object or string"
+ );
+ return cb(options.ignoreError ? null : err);
+ }
+
+ // S5.3 step 2
+ const now = options.now || new Date(); // will assign later to save effort in the face of errors
+
+ // S5.3 step 3: NOOP; persistent-flag and expiry-time is handled by getCookie()
+
+ // S5.3 step 4: NOOP; domain is null by default
+
+ // S5.3 step 5: public suffixes
+ if (this.rejectPublicSuffixes && cookie.domain) {
+ const suffix = pubsuffix.getPublicSuffix(cookie.cdomain());
+ if (suffix == null) {
+ // e.g. "com"
+ err = new Error("Cookie has domain set to a public suffix");
+ return cb(options.ignoreError ? null : err);
+ }
+ }
+
+ // S5.3 step 6:
+ if (cookie.domain) {
+ if (!domainMatch(host, cookie.cdomain(), false)) {
+ err = new Error(
+ `Cookie not in this host's domain. Cookie:${cookie.cdomain()} Request:${host}`
+ );
+ return cb(options.ignoreError ? null : err);
+ }
+
+ if (cookie.hostOnly == null) {
+ // don't reset if already set
+ cookie.hostOnly = false;
+ }
+ } else {
+ cookie.hostOnly = true;
+ cookie.domain = host;
+ }
+
+ //S5.2.4 If the attribute-value is empty or if the first character of the
+ //attribute-value is not %x2F ("/"):
+ //Let cookie-path be the default-path.
+ if (!cookie.path || cookie.path[0] !== "/") {
+ cookie.path = defaultPath(context.pathname);
+ cookie.pathIsDefault = true;
+ }
+
+ // S5.3 step 8: NOOP; secure attribute
+ // S5.3 step 9: NOOP; httpOnly attribute
+
+ // S5.3 step 10
+ if (options.http === false && cookie.httpOnly) {
+ err = new Error("Cookie is HttpOnly and this isn't an HTTP API");
+ return cb(options.ignoreError ? null : err);
+ }
+
+ // 6252bis-02 S5.4 Step 13 & 14:
+ if (cookie.sameSite !== "none" && sameSiteContext) {
+ // "If the cookie's "same-site-flag" is not "None", and the cookie
+ // is being set from a context whose "site for cookies" is not an
+ // exact match for request-uri's host's registered domain, then
+ // abort these steps and ignore the newly created cookie entirely."
+ if (sameSiteContext === "none") {
+ err = new Error(
+ "Cookie is SameSite but this is a cross-origin request"
+ );
+ return cb(options.ignoreError ? null : err);
+ }
+ }
+
+ /* 6265bis-02 S5.4 Steps 15 & 16 */
+ const ignoreErrorForPrefixSecurity =
+ this.prefixSecurity === PrefixSecurityEnum.SILENT;
+ const prefixSecurityDisabled =
+ this.prefixSecurity === PrefixSecurityEnum.DISABLED;
+ /* If prefix checking is not disabled ...*/
+ if (!prefixSecurityDisabled) {
+ let errorFound = false;
+ let errorMsg;
+ /* Check secure prefix condition */
+ if (!isSecurePrefixConditionMet(cookie)) {
+ errorFound = true;
+ errorMsg = "Cookie has __Secure prefix but Secure attribute is not set";
+ } else if (!isHostPrefixConditionMet(cookie)) {
+ /* Check host prefix condition */
+ errorFound = true;
+ errorMsg =
+ "Cookie has __Host prefix but either Secure or HostOnly attribute is not set or Path is not '/'";
+ }
+ if (errorFound) {
+ return cb(
+ options.ignoreError || ignoreErrorForPrefixSecurity
+ ? null
+ : new Error(errorMsg)
+ );
+ }
+ }
+
+ const store = this.store;
+
+ if (!store.updateCookie) {
+ store.updateCookie = function(oldCookie, newCookie, cb) {
+ this.putCookie(newCookie, cb);
+ };
+ }
+
+ function withCookie(err, oldCookie) {
+ if (err) {
+ return cb(err);
+ }
+
+ const next = function(err) {
+ if (err) {
+ return cb(err);
+ } else {
+ cb(null, cookie);
+ }
+ };
+
+ if (oldCookie) {
+ // S5.3 step 11 - "If the cookie store contains a cookie with the same name,
+ // domain, and path as the newly created cookie:"
+ if (options.http === false && oldCookie.httpOnly) {
+ // step 11.2
+ err = new Error("old Cookie is HttpOnly and this isn't an HTTP API");
+ return cb(options.ignoreError ? null : err);
+ }
+ cookie.creation = oldCookie.creation; // step 11.3
+ cookie.creationIndex = oldCookie.creationIndex; // preserve tie-breaker
+ cookie.lastAccessed = now;
+ // Step 11.4 (delete cookie) is implied by just setting the new one:
+ store.updateCookie(oldCookie, cookie, next); // step 12
+ } else {
+ cookie.creation = cookie.lastAccessed = now;
+ store.putCookie(cookie, next); // step 12
+ }
+ }
+
+ store.findCookie(cookie.domain, cookie.path, cookie.key, withCookie);
+ }
+
+ // RFC6365 S5.4
+ getCookies(url, options, cb) {
+ const context = getCookieContext(url);
+ if (typeof options === "function") {
+ cb = options;
+ options = {};
+ }
+
+ const host = canonicalDomain(context.hostname);
+ const path = context.pathname || "/";
+
+ let secure = options.secure;
+ if (
+ secure == null &&
+ context.protocol &&
+ (context.protocol == "https:" || context.protocol == "wss:")
+ ) {
+ secure = true;
+ }
+
+ let sameSiteLevel = 0;
+ if (options.sameSiteContext) {
+ const sameSiteContext = checkSameSiteContext(options.sameSiteContext);
+ sameSiteLevel = Cookie.sameSiteLevel[sameSiteContext];
+ if (!sameSiteLevel) {
+ return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR));
+ }
+ }
+
+ let http = options.http;
+ if (http == null) {
+ http = true;
+ }
+
+ const now = options.now || Date.now();
+ const expireCheck = options.expire !== false;
+ const allPaths = !!options.allPaths;
+ const store = this.store;
+
+ function matchingCookie(c) {
+ // "Either:
+ // The cookie's host-only-flag is true and the canonicalized
+ // request-host is identical to the cookie's domain.
+ // Or:
+ // The cookie's host-only-flag is false and the canonicalized
+ // request-host domain-matches the cookie's domain."
+ if (c.hostOnly) {
+ if (c.domain != host) {
+ return false;
+ }
+ } else {
+ if (!domainMatch(host, c.domain, false)) {
+ return false;
+ }
+ }
+
+ // "The request-uri's path path-matches the cookie's path."
+ if (!allPaths && !pathMatch(path, c.path)) {
+ return false;
+ }
+
+ // "If the cookie's secure-only-flag is true, then the request-uri's
+ // scheme must denote a "secure" protocol"
+ if (c.secure && !secure) {
+ return false;
+ }
+
+ // "If the cookie's http-only-flag is true, then exclude the cookie if the
+ // cookie-string is being generated for a "non-HTTP" API"
+ if (c.httpOnly && !http) {
+ return false;
+ }
+
+ // RFC6265bis-02 S5.3.7
+ if (sameSiteLevel) {
+ const cookieLevel = Cookie.sameSiteLevel[c.sameSite || "none"];
+ if (cookieLevel > sameSiteLevel) {
+ // only allow cookies at or below the request level
+ return false;
+ }
+ }
+
+ // deferred from S5.3
+ // non-RFC: allow retention of expired cookies by choice
+ if (expireCheck && c.expiryTime() <= now) {
+ store.removeCookie(c.domain, c.path, c.key, () => {}); // result ignored
+ return false;
+ }
+
+ return true;
+ }
+
+ store.findCookies(
+ host,
+ allPaths ? null : path,
+ this.allowSpecialUseDomain,
+ (err, cookies) => {
+ if (err) {
+ return cb(err);
+ }
+
+ cookies = cookies.filter(matchingCookie);
+
+ // sorting of S5.4 part 2
+ if (options.sort !== false) {
+ cookies = cookies.sort(cookieCompare);
+ }
+
+ // S5.4 part 3
+ const now = new Date();
+ for (const cookie of cookies) {
+ cookie.lastAccessed = now;
+ }
+ // TODO persist lastAccessed
+
+ cb(null, cookies);
+ }
+ );
+ }
+
+ getCookieString(...args) {
+ const cb = args.pop();
+ const next = function(err, cookies) {
+ if (err) {
+ cb(err);
+ } else {
+ cb(
+ null,
+ cookies
+ .sort(cookieCompare)
+ .map(c => c.cookieString())
+ .join("; ")
+ );
+ }
+ };
+ args.push(next);
+ this.getCookies.apply(this, args);
+ }
+
+ getSetCookieStrings(...args) {
+ const cb = args.pop();
+ const next = function(err, cookies) {
+ if (err) {
+ cb(err);
+ } else {
+ cb(
+ null,
+ cookies.map(c => {
+ return c.toString();
+ })
+ );
+ }
+ };
+ args.push(next);
+ this.getCookies.apply(this, args);
+ }
+
+ serialize(cb) {
+ let type = this.store.constructor.name;
+ if (type === "Object") {
+ type = null;
+ }
+
+ // update README.md "Serialization Format" if you change this, please!
+ const serialized = {
+ // The version of tough-cookie that serialized this jar. Generally a good
+ // practice since future versions can make data import decisions based on
+ // known past behavior. When/if this matters, use `semver`.
+ version: `tough-cookie@${VERSION}`,
+
+ // add the store type, to make humans happy:
+ storeType: type,
+
+ // CookieJar configuration:
+ rejectPublicSuffixes: !!this.rejectPublicSuffixes,
+
+ // this gets filled from getAllCookies:
+ cookies: []
+ };
+
+ if (
+ !(
+ this.store.getAllCookies &&
+ typeof this.store.getAllCookies === "function"
+ )
+ ) {
+ return cb(
+ new Error(
+ "store does not support getAllCookies and cannot be serialized"
+ )
+ );
+ }
+
+ this.store.getAllCookies((err, cookies) => {
+ if (err) {
+ return cb(err);
+ }
+
+ serialized.cookies = cookies.map(cookie => {
+ // convert to serialized 'raw' cookies
+ cookie = cookie instanceof Cookie ? cookie.toJSON() : cookie;
+
+ // Remove the index so new ones get assigned during deserialization
+ delete cookie.creationIndex;
+
+ return cookie;
+ });
+
+ return cb(null, serialized);
+ });
+ }
+
+ toJSON() {
+ return this.serializeSync();
+ }
+
+ // use the class method CookieJar.deserialize instead of calling this directly
+ _importCookies(serialized, cb) {
+ let cookies = serialized.cookies;
+ if (!cookies || !Array.isArray(cookies)) {
+ return cb(new Error("serialized jar has no cookies array"));
+ }
+ cookies = cookies.slice(); // do not modify the original
+
+ const putNext = err => {
+ if (err) {
+ return cb(err);
+ }
+
+ if (!cookies.length) {
+ return cb(err, this);
+ }
+
+ let cookie;
+ try {
+ cookie = fromJSON(cookies.shift());
+ } catch (e) {
+ return cb(e);
+ }
+
+ if (cookie === null) {
+ return putNext(null); // skip this cookie
+ }
+
+ this.store.putCookie(cookie, putNext);
+ };
+
+ putNext();
+ }
+
+ clone(newStore, cb) {
+ if (arguments.length === 1) {
+ cb = newStore;
+ newStore = null;
+ }
+
+ this.serialize((err, serialized) => {
+ if (err) {
+ return cb(err);
+ }
+ CookieJar.deserialize(serialized, newStore, cb);
+ });
+ }
+
+ cloneSync(newStore) {
+ if (arguments.length === 0) {
+ return this._cloneSync();
+ }
+ if (!newStore.synchronous) {
+ throw new Error(
+ "CookieJar clone destination store is not synchronous; use async API instead."
+ );
+ }
+ return this._cloneSync(newStore);
+ }
+
+ removeAllCookies(cb) {
+ const store = this.store;
+
+ // Check that the store implements its own removeAllCookies(). The default
+ // implementation in Store will immediately call the callback with a "not
+ // implemented" Error.
+ if (
+ typeof store.removeAllCookies === "function" &&
+ store.removeAllCookies !== Store.prototype.removeAllCookies
+ ) {
+ return store.removeAllCookies(cb);
+ }
+
+ store.getAllCookies((err, cookies) => {
+ if (err) {
+ return cb(err);
+ }
+
+ if (cookies.length === 0) {
+ return cb(null);
+ }
+
+ let completedCount = 0;
+ const removeErrors = [];
+
+ function removeCookieCb(removeErr) {
+ if (removeErr) {
+ removeErrors.push(removeErr);
+ }
+
+ completedCount++;
+
+ if (completedCount === cookies.length) {
+ return cb(removeErrors.length ? removeErrors[0] : null);
+ }
+ }
+
+ cookies.forEach(cookie => {
+ store.removeCookie(
+ cookie.domain,
+ cookie.path,
+ cookie.key,
+ removeCookieCb
+ );
+ });
+ });
+ }
+
+ static deserialize(strOrObj, store, cb) {
+ if (arguments.length !== 3) {
+ // store is optional
+ cb = store;
+ store = null;
+ }
+
+ let serialized;
+ if (typeof strOrObj === "string") {
+ serialized = jsonParse(strOrObj);
+ if (serialized instanceof Error) {
+ return cb(serialized);
+ }
+ } else {
+ serialized = strOrObj;
+ }
+
+ const jar = new CookieJar(store, serialized.rejectPublicSuffixes);
+ jar._importCookies(serialized, err => {
+ if (err) {
+ return cb(err);
+ }
+ cb(null, jar);
+ });
+ }
+
+ static deserializeSync(strOrObj, store) {
+ const serialized =
+ typeof strOrObj === "string" ? JSON.parse(strOrObj) : strOrObj;
+ const jar = new CookieJar(store, serialized.rejectPublicSuffixes);
+
+ // catch this mistake early:
+ if (!jar.store.synchronous) {
+ throw new Error(
+ "CookieJar store is not synchronous; use async API instead."
+ );
+ }
+
+ jar._importCookiesSync(serialized);
+ return jar;
+ }
+}
+CookieJar.fromJSON = CookieJar.deserializeSync;
+
+[
+ "_importCookies",
+ "clone",
+ "getCookies",
+ "getCookieString",
+ "getSetCookieStrings",
+ "removeAllCookies",
+ "serialize",
+ "setCookie"
+].forEach(name => {
+ CookieJar.prototype[name] = fromCallback(CookieJar.prototype[name]);
+});
+CookieJar.deserialize = fromCallback(CookieJar.deserialize);
+
+// Use a closure to provide a true imperative API for synchronous stores.
+function syncWrap(method) {
+ return function(...args) {
+ if (!this.store.synchronous) {
+ throw new Error(
+ "CookieJar store is not synchronous; use async API instead."
+ );
+ }
+
+ let syncErr, syncResult;
+ this[method](...args, (err, result) => {
+ syncErr = err;
+ syncResult = result;
+ });
+
+ if (syncErr) {
+ throw syncErr;
+ }
+ return syncResult;
+ };
+}
+
+exports.version = VERSION;
+exports.CookieJar = CookieJar;
+exports.Cookie = Cookie;
+exports.Store = Store;
+exports.MemoryCookieStore = MemoryCookieStore;
+exports.parseDate = parseDate;
+exports.formatDate = formatDate;
+exports.parse = parse;
+exports.fromJSON = fromJSON;
+exports.domainMatch = domainMatch;
+exports.defaultPath = defaultPath;
+exports.pathMatch = pathMatch;
+exports.getPublicSuffix = pubsuffix.getPublicSuffix;
+exports.cookieCompare = cookieCompare;
+exports.permuteDomain = __webpack_require__(89).permuteDomain;
+exports.permutePath = permutePath;
+exports.canonicalDomain = canonicalDomain;
+exports.PrefixSecurityEnum = PrefixSecurityEnum;
+
+
+/***/ }),
+/* 394 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var stream = __webpack_require__(914)
+var eos = __webpack_require__(3)
+var inherits = __webpack_require__(689)
+var shift = __webpack_require__(475)
+
+var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from)
+ ? Buffer.from([0])
+ : new Buffer([0])
+
+var onuncork = function(self, fn) {
+ if (self._corked) self.once('uncork', fn)
+ else fn()
+}
+
+var autoDestroy = function (self, err) {
+ if (self._autoDestroy) self.destroy(err)
+}
+
+var destroyer = function(self, end) {
+ return function(err) {
+ if (err) autoDestroy(self, err.message === 'premature close' ? null : err)
+ else if (end && !self._ended) self.end()
+ }
+}
+
+var end = function(ws, fn) {
+ if (!ws) return fn()
+ if (ws._writableState && ws._writableState.finished) return fn()
+ if (ws._writableState) return ws.end(fn)
+ ws.end()
+ fn()
+}
+
+var toStreams2 = function(rs) {
+ return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs)
+}
+
+var Duplexify = function(writable, readable, opts) {
+ if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts)
+ stream.Duplex.call(this, opts)
+
+ this._writable = null
+ this._readable = null
+ this._readable2 = null
+
+ this._autoDestroy = !opts || opts.autoDestroy !== false
+ this._forwardDestroy = !opts || opts.destroy !== false
+ this._forwardEnd = !opts || opts.end !== false
+ this._corked = 1 // start corked
+ this._ondrain = null
+ this._drained = false
+ this._forwarding = false
+ this._unwrite = null
+ this._unread = null
+ this._ended = false
+
+ this.destroyed = false
+
+ if (writable) this.setWritable(writable)
+ if (readable) this.setReadable(readable)
+}
+
+inherits(Duplexify, stream.Duplex)
+
+Duplexify.obj = function(writable, readable, opts) {
+ if (!opts) opts = {}
+ opts.objectMode = true
+ opts.highWaterMark = 16
+ return new Duplexify(writable, readable, opts)
+}
+
+Duplexify.prototype.cork = function() {
+ if (++this._corked === 1) this.emit('cork')
+}
+
+Duplexify.prototype.uncork = function() {
+ if (this._corked && --this._corked === 0) this.emit('uncork')
+}
+
+Duplexify.prototype.setWritable = function(writable) {
+ if (this._unwrite) this._unwrite()
+
+ if (this.destroyed) {
+ if (writable && writable.destroy) writable.destroy()
+ return
+ }
+
+ if (writable === null || writable === false) {
+ this.end()
+ return
+ }
+
+ var self = this
+ var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd))
+
+ var ondrain = function() {
+ var ondrain = self._ondrain
+ self._ondrain = null
+ if (ondrain) ondrain()
+ }
+
+ var clear = function() {
+ self._writable.removeListener('drain', ondrain)
+ unend()
+ }
+
+ if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks
+
+ this._writable = writable
+ this._writable.on('drain', ondrain)
+ this._unwrite = clear
+
+ this.uncork() // always uncork setWritable
+}
+
+Duplexify.prototype.setReadable = function(readable) {
+ if (this._unread) this._unread()
+
+ if (this.destroyed) {
+ if (readable && readable.destroy) readable.destroy()
+ return
+ }
+
+ if (readable === null || readable === false) {
+ this.push(null)
+ this.resume()
+ return
+ }
+
+ var self = this
+ var unend = eos(readable, {writable:false, readable:true}, destroyer(this))
+
+ var onreadable = function() {
+ self._forward()
+ }
+
+ var onend = function() {
+ self.push(null)
+ }
+
+ var clear = function() {
+ self._readable2.removeListener('readable', onreadable)
+ self._readable2.removeListener('end', onend)
+ unend()
+ }
+
+ this._drained = true
+ this._readable = readable
+ this._readable2 = readable._readableState ? readable : toStreams2(readable)
+ this._readable2.on('readable', onreadable)
+ this._readable2.on('end', onend)
+ this._unread = clear
+
+ this._forward()
+}
+
+Duplexify.prototype._read = function() {
+ this._drained = true
+ this._forward()
+}
+
+Duplexify.prototype._forward = function() {
+ if (this._forwarding || !this._readable2 || !this._drained) return
+ this._forwarding = true
+
+ var data
+
+ while (this._drained && (data = shift(this._readable2)) !== null) {
+ if (this.destroyed) continue
+ this._drained = this.push(data)
+ }
+
+ this._forwarding = false
+}
+
+Duplexify.prototype.destroy = function(err) {
+ if (this.destroyed) return
+ this.destroyed = true
+
+ var self = this
+ process.nextTick(function() {
+ self._destroy(err)
+ })
+}
+
+Duplexify.prototype._destroy = function(err) {
+ if (err) {
+ var ondrain = this._ondrain
+ this._ondrain = null
+ if (ondrain) ondrain(err)
+ else this.emit('error', err)
+ }
+
+ if (this._forwardDestroy) {
+ if (this._readable && this._readable.destroy) this._readable.destroy()
+ if (this._writable && this._writable.destroy) this._writable.destroy()
+ }
+
+ this.emit('close')
+}
+
+Duplexify.prototype._write = function(data, enc, cb) {
+ if (this.destroyed) return cb()
+ if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb))
+ if (data === SIGNAL_FLUSH) return this._finish(cb)
+ if (!this._writable) return cb()
+
+ if (this._writable.write(data) === false) this._ondrain = cb
+ else cb()
+}
+
+Duplexify.prototype._finish = function(cb) {
+ var self = this
+ this.emit('preend')
+ onuncork(this, function() {
+ end(self._forwardEnd && self._writable, function() {
+ // haxx to not emit prefinish twice
+ if (self._writableState.prefinished === false) self._writableState.prefinished = true
+ self.emit('prefinish')
+ onuncork(self, cb)
+ })
+ })
+}
+
+Duplexify.prototype.end = function(data, enc, cb) {
+ if (typeof data === 'function') return this.end(null, null, data)
+ if (typeof enc === 'function') return this.end(data, null, enc)
+ this._ended = true
+ if (data) this.write(data)
+ if (!this._writableState.ending) this.write(SIGNAL_FLUSH)
+ return stream.Writable.prototype.end.call(this, cb)
+}
+
+module.exports = Duplexify
+
+
+/***/ }),
+/* 395 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const cloneDeep = __webpack_require__(452)
+const figgyPudding = __webpack_require__(965)
+const { fixer } = __webpack_require__(821)
+const getStream = __webpack_require__(145)
+const npa = __webpack_require__(482)
+const npmAuth = __webpack_require__(872)
+const npmFetch = __webpack_require__(789)
+const semver = __webpack_require__(516)
+const ssri = __webpack_require__(951)
+const url = __webpack_require__(835)
+const validate = __webpack_require__(772)
+
+const PublishConfig = figgyPudding({
+ access: {},
+ algorithms: { default: ['sha512'] },
+ npmVersion: {},
+ tag: { default: 'latest' },
+ Promise: { default: () => Promise }
+})
+
+module.exports = publish
+function publish (manifest, tarball, opts) {
+ opts = PublishConfig(opts)
+ return new opts.Promise(resolve => resolve()).then(() => {
+ validate('OSO|OOO', [manifest, tarball, opts])
+ if (manifest.private) {
+ throw Object.assign(new Error(
+ 'This package has been marked as private\n' +
+ "Remove the 'private' field from the package.json to publish it."
+ ), { code: 'EPRIVATE' })
+ }
+ const spec = npa.resolve(manifest.name, manifest.version)
+ // NOTE: spec is used to pick the appropriate registry/auth combo.
+ opts = opts.concat(manifest.publishConfig, { spec })
+ const reg = npmFetch.pickRegistry(spec, opts)
+ const auth = npmAuth(reg, opts)
+ const pubManifest = patchedManifest(spec, auth, manifest, opts)
+
+ // registry-frontdoor cares about the access level, which is only
+ // configurable for scoped packages
+ if (!spec.scope && opts.access === 'restricted') {
+ throw Object.assign(
+ new Error("Can't restrict access to unscoped packages."),
+ { code: 'EUNSCOPED' }
+ )
+ }
+
+ return slurpTarball(tarball, opts).then(tardata => {
+ const metadata = buildMetadata(
+ spec, auth, reg, pubManifest, tardata, opts
+ )
+ return npmFetch(spec.escapedName, opts.concat({
+ method: 'PUT',
+ body: metadata,
+ ignoreBody: true
+ })).catch(err => {
+ if (err.code !== 'E409') { throw err }
+ return npmFetch.json(spec.escapedName, opts.concat({
+ query: { write: true }
+ })).then(
+ current => patchMetadata(current, metadata, opts)
+ ).then(newMetadata => {
+ return npmFetch(spec.escapedName, opts.concat({
+ method: 'PUT',
+ body: newMetadata,
+ ignoreBody: true
+ }))
+ })
+ })
+ })
+ }).then(() => true)
+}
+
+function patchedManifest (spec, auth, base, opts) {
+ const manifest = cloneDeep(base)
+ manifest._nodeVersion = process.versions.node
+ if (opts.npmVersion) {
+ manifest._npmVersion = opts.npmVersion
+ }
+ if (auth.username || auth.email) {
+ // NOTE: This is basically pointless, but reproduced because it's what
+ // legacy does: tl;dr `auth.username` and `auth.email` are going to be
+ // undefined in any auth situation that uses tokens instead of plain
+ // auth. I can only assume some registries out there decided that
+ // _npmUser would be of any use to them, but _npmUser in packuments
+ // currently gets filled in by the npm registry itself, based on auth
+ // information.
+ manifest._npmUser = {
+ name: auth.username,
+ email: auth.email
+ }
+ }
+
+ fixer.fixNameField(manifest, { strict: true, allowLegacyCase: true })
+ const version = semver.clean(manifest.version)
+ if (!version) {
+ throw Object.assign(
+ new Error('invalid semver: ' + manifest.version),
+ { code: 'EBADSEMVER' }
+ )
+ }
+ manifest.version = version
+ return manifest
+}
+
+function buildMetadata (spec, auth, registry, manifest, tardata, opts) {
+ const root = {
+ _id: manifest.name,
+ name: manifest.name,
+ description: manifest.description,
+ 'dist-tags': {},
+ versions: {},
+ readme: manifest.readme || ''
+ }
+
+ if (opts.access) root.access = opts.access
+
+ if (!auth.token) {
+ root.maintainers = [{ name: auth.username, email: auth.email }]
+ manifest.maintainers = JSON.parse(JSON.stringify(root.maintainers))
+ }
+
+ root.versions[manifest.version] = manifest
+ const tag = manifest.tag || opts.tag
+ root['dist-tags'][tag] = manifest.version
+
+ const tbName = manifest.name + '-' + manifest.version + '.tgz'
+ const tbURI = manifest.name + '/-/' + tbName
+ const integrity = ssri.fromData(tardata, {
+ algorithms: [...new Set(['sha1'].concat(opts.algorithms))]
+ })
+
+ manifest._id = manifest.name + '@' + manifest.version
+ manifest.dist = manifest.dist || {}
+ // Don't bother having sha1 in the actual integrity field
+ manifest.dist.integrity = integrity.sha512[0].toString()
+ // Legacy shasum support
+ manifest.dist.shasum = integrity.sha1[0].hexDigest()
+ manifest.dist.tarball = url.resolve(registry, tbURI)
+ .replace(/^https:\/\//, 'http://')
+
+ root._attachments = {}
+ root._attachments[tbName] = {
+ content_type: 'application/octet-stream',
+ data: tardata.toString('base64'),
+ length: tardata.length
+ }
+
+ return root
+}
+
+function patchMetadata (current, newData, opts) {
+ const curVers = Object.keys(current.versions || {}).map(v => {
+ return semver.clean(v, true)
+ }).concat(Object.keys(current.time || {}).map(v => {
+ if (semver.valid(v, true)) { return semver.clean(v, true) }
+ })).filter(v => v)
+
+ const newVersion = Object.keys(newData.versions)[0]
+
+ if (curVers.indexOf(newVersion) !== -1) {
+ throw ConflictError(newData.name, newData.version)
+ }
+
+ current.versions = current.versions || {}
+ current.versions[newVersion] = newData.versions[newVersion]
+ for (var i in newData) {
+ switch (i) {
+ // objects that copy over the new stuffs
+ case 'dist-tags':
+ case 'versions':
+ case '_attachments':
+ for (var j in newData[i]) {
+ current[i] = current[i] || {}
+ current[i][j] = newData[i][j]
+ }
+ break
+
+ // ignore these
+ case 'maintainers':
+ break
+
+ // copy
+ default:
+ current[i] = newData[i]
+ }
+ }
+ const maint = newData.maintainers && JSON.parse(JSON.stringify(newData.maintainers))
+ newData.versions[newVersion].maintainers = maint
+ return current
+}
+
+function slurpTarball (tarSrc, opts) {
+ if (Buffer.isBuffer(tarSrc)) {
+ return opts.Promise.resolve(tarSrc)
+ } else if (typeof tarSrc === 'string') {
+ return opts.Promise.resolve(Buffer.from(tarSrc, 'base64'))
+ } else if (typeof tarSrc.pipe === 'function') {
+ return getStream.buffer(tarSrc)
+ } else {
+ return opts.Promise.reject(Object.assign(
+ new Error('invalid tarball argument. Must be a Buffer, a base64 string, or a binary stream'), {
+ code: 'EBADTAR'
+ }))
+ }
+}
+
+function ConflictError (pkgid, version) {
+ return Object.assign(new Error(
+ `Cannot publish ${pkgid}@${version} over existing version.`
+ ), {
+ code: 'EPUBLISHCONFLICT',
+ pkgid,
+ version
+ })
+}
+
+
+/***/ }),
+/* 396 */
+/***/ (function(module) {
+
+"use strict";
+
+module.exports = function (Yallist) {
+ Yallist.prototype[Symbol.iterator] = function* () {
+ for (let walker = this.head; walker; walker = walker.next) {
+ yield walker.value
+ }
+ }
+}
+
+
+/***/ }),
+/* 397 */,
+/* 398 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+var util = __webpack_require__(669)
+var TrackerBase = __webpack_require__(187)
+var Tracker = __webpack_require__(623)
+var TrackerStream = __webpack_require__(235)
+
+var TrackerGroup = module.exports = function (name) {
+ TrackerBase.call(this, name)
+ this.parentGroup = null
+ this.trackers = []
+ this.completion = {}
+ this.weight = {}
+ this.totalWeight = 0
+ this.finished = false
+ this.bubbleChange = bubbleChange(this)
+}
+util.inherits(TrackerGroup, TrackerBase)
+
+function bubbleChange (trackerGroup) {
+ return function (name, completed, tracker) {
+ trackerGroup.completion[tracker.id] = completed
+ if (trackerGroup.finished) return
+ trackerGroup.emit('change', name || trackerGroup.name, trackerGroup.completed(), trackerGroup)
+ }
+}
+
+TrackerGroup.prototype.nameInTree = function () {
+ var names = []
+ var from = this
+ while (from) {
+ names.unshift(from.name)
+ from = from.parentGroup
+ }
+ return names.join('/')
+}
+
+TrackerGroup.prototype.addUnit = function (unit, weight) {
+ if (unit.addUnit) {
+ var toTest = this
+ while (toTest) {
+ if (unit === toTest) {
+ throw new Error(
+ 'Attempted to add tracker group ' +
+ unit.name + ' to tree that already includes it ' +
+ this.nameInTree(this))
+ }
+ toTest = toTest.parentGroup
+ }
+ unit.parentGroup = this
+ }
+ this.weight[unit.id] = weight || 1
+ this.totalWeight += this.weight[unit.id]
+ this.trackers.push(unit)
+ this.completion[unit.id] = unit.completed()
+ unit.on('change', this.bubbleChange)
+ if (!this.finished) this.emit('change', unit.name, this.completion[unit.id], unit)
+ return unit
+}
+
+TrackerGroup.prototype.completed = function () {
+ if (this.trackers.length === 0) return 0
+ var valPerWeight = 1 / this.totalWeight
+ var completed = 0
+ for (var ii = 0; ii < this.trackers.length; ii++) {
+ var trackerId = this.trackers[ii].id
+ completed += valPerWeight * this.weight[trackerId] * this.completion[trackerId]
+ }
+ return completed
+}
+
+TrackerGroup.prototype.newGroup = function (name, weight) {
+ return this.addUnit(new TrackerGroup(name), weight)
+}
+
+TrackerGroup.prototype.newItem = function (name, todo, weight) {
+ return this.addUnit(new Tracker(name, todo), weight)
+}
+
+TrackerGroup.prototype.newStream = function (name, todo, weight) {
+ return this.addUnit(new TrackerStream(name, todo), weight)
+}
+
+TrackerGroup.prototype.finish = function () {
+ this.finished = true
+ if (!this.trackers.length) this.addUnit(new Tracker(), 1, true)
+ for (var ii = 0; ii < this.trackers.length; ii++) {
+ var tracker = this.trackers[ii]
+ tracker.finish()
+ tracker.removeListener('change', this.bubbleChange)
+ }
+ this.emit('change', this.name, 1, this)
+}
+
+var buffer = ' '
+TrackerGroup.prototype.debug = function (depth) {
+ depth = depth || 0
+ var indent = depth ? buffer.substr(0, depth) : ''
+ var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n'
+ this.trackers.forEach(function (tracker) {
+ if (tracker instanceof TrackerGroup) {
+ output += tracker.debug(depth + 1)
+ } else {
+ output += indent + ' ' + tracker.name + ': ' + tracker.completed() + '\n'
+ }
+ })
+ return output
+}
+
+
+/***/ }),
+/* 399 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+module.exports = inflight
+
+let Bluebird
+try {
+ Bluebird = __webpack_require__(900)
+} catch (_) {
+ Bluebird = Promise
+}
+
+const active = {}
+inflight.active = active
+function inflight (unique, doFly) {
+ return Bluebird.all([unique, doFly]).then(function (args) {
+ const unique = args[0]
+ const doFly = args[1]
+ if (Array.isArray(unique)) {
+ return Bluebird.all(unique).then(function (uniqueArr) {
+ return _inflight(uniqueArr.join(''), doFly)
+ })
+ } else {
+ return _inflight(unique, doFly)
+ }
+ })
+
+ function _inflight (unique, doFly) {
+ if (!active[unique]) {
+ active[unique] = (new Bluebird(function (resolve) {
+ return resolve(doFly())
+ }))
+ active[unique].then(cleanup, cleanup)
+ function cleanup() { delete active[unique] }
+ }
+ return active[unique]
+ }
+}
+
+
+/***/ }),
+/* 400 */
+/***/ (function(module, exports, __webpack_require__) {
+
+var Stream = __webpack_require__(794)
+
+// through
+//
+// a stream that does nothing but re-emit the input.
+// useful for aggregating a series of changing but not ending streams into one stream)
+
+exports = module.exports = through
+through.through = through
+
+//create a readable writable stream.
+
+function through (write, end, opts) {
+ write = write || function (data) { this.queue(data) }
+ end = end || function () { this.queue(null) }
+
+ var ended = false, destroyed = false, buffer = [], _ended = false
+ var stream = new Stream()
+ stream.readable = stream.writable = true
+ stream.paused = false
+
+// stream.autoPause = !(opts && opts.autoPause === false)
+ stream.autoDestroy = !(opts && opts.autoDestroy === false)
+
+ stream.write = function (data) {
+ write.call(this, data)
+ return !stream.paused
+ }
+
+ function drain() {
+ while(buffer.length && !stream.paused) {
+ var data = buffer.shift()
+ if(null === data)
+ return stream.emit('end')
+ else
+ stream.emit('data', data)
+ }
+ }
+
+ stream.queue = stream.push = function (data) {
+// console.error(ended)
+ if(_ended) return stream
+ if(data === null) _ended = true
+ buffer.push(data)
+ drain()
+ return stream
+ }
+
+ //this will be registered as the first 'end' listener
+ //must call destroy next tick, to make sure we're after any
+ //stream piped from here.
+ //this is only a problem if end is not emitted synchronously.
+ //a nicer way to do this is to make sure this is the last listener for 'end'
+
+ stream.on('end', function () {
+ stream.readable = false
+ if(!stream.writable && stream.autoDestroy)
+ process.nextTick(function () {
+ stream.destroy()
+ })
+ })
+
+ function _end () {
+ stream.writable = false
+ end.call(stream)
+ if(!stream.readable && stream.autoDestroy)
+ stream.destroy()
+ }
+
+ stream.end = function (data) {
+ if(ended) return
+ ended = true
+ if(arguments.length) stream.write(data)
+ _end() // will emit or queue
+ return stream
+ }
+
+ stream.destroy = function () {
+ if(destroyed) return
+ destroyed = true
+ ended = true
+ buffer.length = 0
+ stream.writable = stream.readable = false
+ stream.emit('close')
+ return stream
+ }
+
+ stream.pause = function () {
+ if(stream.paused) return
+ stream.paused = true
+ return stream
+ }
+
+ stream.resume = function () {
+ if(stream.paused) {
+ stream.paused = false
+ stream.emit('resume')
+ }
+ drain()
+ //may have become paused again,
+ //as drain emits 'data'.
+ if(!stream.paused)
+ stream.emit('drain')
+ return stream
+ }
+ return stream
+}
+
+
+
+/***/ }),
+/* 401 */
+/***/ (function(module, exports, __webpack_require__) {
+
+// info about each config option.
+
+var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG
+ ? function () { console.error.apply(console, arguments) }
+ : function () {}
+
+var url = __webpack_require__(835)
+ , path = __webpack_require__(622)
+ , Stream = __webpack_require__(794).Stream
+ , abbrev = __webpack_require__(916)
+ , osenv = __webpack_require__(580)
+
+module.exports = exports = nopt
+exports.clean = clean
+
+exports.typeDefs =
+ { String : { type: String, validate: validateString }
+ , Boolean : { type: Boolean, validate: validateBoolean }
+ , url : { type: url, validate: validateUrl }
+ , Number : { type: Number, validate: validateNumber }
+ , path : { type: path, validate: validatePath }
+ , Stream : { type: Stream, validate: validateStream }
+ , Date : { type: Date, validate: validateDate }
+ }
+
+function nopt (types, shorthands, args, slice) {
+ args = args || process.argv
+ types = types || {}
+ shorthands = shorthands || {}
+ if (typeof slice !== "number") slice = 2
+
+ debug(types, shorthands, args, slice)
+
+ args = args.slice(slice)
+ var data = {}
+ , key
+ , argv = {
+ remain: [],
+ cooked: args,
+ original: args.slice(0)
+ }
+
+ parse(args, data, argv.remain, types, shorthands)
+ // now data is full
+ clean(data, types, exports.typeDefs)
+ data.argv = argv
+ Object.defineProperty(data.argv, 'toString', { value: function () {
+ return this.original.map(JSON.stringify).join(" ")
+ }, enumerable: false })
+ return data
+}
+
+function clean (data, types, typeDefs) {
+ typeDefs = typeDefs || exports.typeDefs
+ var remove = {}
+ , typeDefault = [false, true, null, String, Array]
+
+ Object.keys(data).forEach(function (k) {
+ if (k === "argv") return
+ var val = data[k]
+ , isArray = Array.isArray(val)
+ , type = types[k]
+ if (!isArray) val = [val]
+ if (!type) type = typeDefault
+ if (type === Array) type = typeDefault.concat(Array)
+ if (!Array.isArray(type)) type = [type]
+
+ debug("val=%j", val)
+ debug("types=", type)
+ val = val.map(function (val) {
+ // if it's an unknown value, then parse false/true/null/numbers/dates
+ if (typeof val === "string") {
+ debug("string %j", val)
+ val = val.trim()
+ if ((val === "null" && ~type.indexOf(null))
+ || (val === "true" &&
+ (~type.indexOf(true) || ~type.indexOf(Boolean)))
+ || (val === "false" &&
+ (~type.indexOf(false) || ~type.indexOf(Boolean)))) {
+ val = JSON.parse(val)
+ debug("jsonable %j", val)
+ } else if (~type.indexOf(Number) && !isNaN(val)) {
+ debug("convert to number", val)
+ val = +val
+ } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) {
+ debug("convert to date", val)
+ val = new Date(val)
+ }
+ }
+
+ if (!types.hasOwnProperty(k)) {
+ return val
+ }
+
+ // allow `--no-blah` to set 'blah' to null if null is allowed
+ if (val === false && ~type.indexOf(null) &&
+ !(~type.indexOf(false) || ~type.indexOf(Boolean))) {
+ val = null
+ }
+
+ var d = {}
+ d[k] = val
+ debug("prevalidated val", d, val, types[k])
+ if (!validate(d, k, val, types[k], typeDefs)) {
+ if (exports.invalidHandler) {
+ exports.invalidHandler(k, val, types[k], data)
+ } else if (exports.invalidHandler !== false) {
+ debug("invalid: "+k+"="+val, types[k])
+ }
+ return remove
+ }
+ debug("validated val", d, val, types[k])
+ return d[k]
+ }).filter(function (val) { return val !== remove })
+
+ // if we allow Array specifically, then an empty array is how we
+ // express 'no value here', not null. Allow it.
+ if (!val.length && type.indexOf(Array) === -1) {
+ debug('VAL HAS NO LENGTH, DELETE IT', val, k, type.indexOf(Array))
+ delete data[k]
+ }
+ else if (isArray) {
+ debug(isArray, data[k], val)
+ data[k] = val
+ } else data[k] = val[0]
+
+ debug("k=%s val=%j", k, val, data[k])
+ })
+}
+
+function validateString (data, k, val) {
+ data[k] = String(val)
+}
+
+function validatePath (data, k, val) {
+ if (val === true) return false
+ if (val === null) return true
+
+ val = String(val)
+
+ var isWin = process.platform === 'win32'
+ , homePattern = isWin ? /^~(\/|\\)/ : /^~\//
+ , home = osenv.home()
+
+ if (home && val.match(homePattern)) {
+ data[k] = path.resolve(home, val.substr(2))
+ } else {
+ data[k] = path.resolve(val)
+ }
+ return true
+}
+
+function validateNumber (data, k, val) {
+ debug("validate Number %j %j %j", k, val, isNaN(val))
+ if (isNaN(val)) return false
+ data[k] = +val
+}
+
+function validateDate (data, k, val) {
+ var s = Date.parse(val)
+ debug("validate Date %j %j %j", k, val, s)
+ if (isNaN(s)) return false
+ data[k] = new Date(val)
+}
+
+function validateBoolean (data, k, val) {
+ if (val instanceof Boolean) val = val.valueOf()
+ else if (typeof val === "string") {
+ if (!isNaN(val)) val = !!(+val)
+ else if (val === "null" || val === "false") val = false
+ else val = true
+ } else val = !!val
+ data[k] = val
+}
+
+function validateUrl (data, k, val) {
+ val = url.parse(String(val))
+ if (!val.host) return false
+ data[k] = val.href
+}
+
+function validateStream (data, k, val) {
+ if (!(val instanceof Stream)) return false
+ data[k] = val
+}
+
+function validate (data, k, val, type, typeDefs) {
+ // arrays are lists of types.
+ if (Array.isArray(type)) {
+ for (var i = 0, l = type.length; i < l; i ++) {
+ if (type[i] === Array) continue
+ if (validate(data, k, val, type[i], typeDefs)) return true
+ }
+ delete data[k]
+ return false
+ }
+
+ // an array of anything?
+ if (type === Array) return true
+
+ // NaN is poisonous. Means that something is not allowed.
+ if (type !== type) {
+ debug("Poison NaN", k, val, type)
+ delete data[k]
+ return false
+ }
+
+ // explicit list of values
+ if (val === type) {
+ debug("Explicitly allowed %j", val)
+ // if (isArray) (data[k] = data[k] || []).push(val)
+ // else data[k] = val
+ data[k] = val
+ return true
+ }
+
+ // now go through the list of typeDefs, validate against each one.
+ var ok = false
+ , types = Object.keys(typeDefs)
+ for (var i = 0, l = types.length; i < l; i ++) {
+ debug("test type %j %j %j", k, val, types[i])
+ var t = typeDefs[types[i]]
+ if (t &&
+ ((type && type.name && t.type && t.type.name) ? (type.name === t.type.name) : (type === t.type))) {
+ var d = {}
+ ok = false !== t.validate(d, k, val)
+ val = d[k]
+ if (ok) {
+ // if (isArray) (data[k] = data[k] || []).push(val)
+ // else data[k] = val
+ data[k] = val
+ break
+ }
+ }
+ }
+ debug("OK? %j (%j %j %j)", ok, k, val, types[i])
+
+ if (!ok) delete data[k]
+ return ok
+}
+
+function parse (args, data, remain, types, shorthands) {
+ debug("parse", args, data, remain)
+
+ var key = null
+ , abbrevs = abbrev(Object.keys(types))
+ , shortAbbr = abbrev(Object.keys(shorthands))
+
+ for (var i = 0; i < args.length; i ++) {
+ var arg = args[i]
+ debug("arg", arg)
+
+ if (arg.match(/^-{2,}$/)) {
+ // done with keys.
+ // the rest are args.
+ remain.push.apply(remain, args.slice(i + 1))
+ args[i] = "--"
+ break
+ }
+ var hadEq = false
+ if (arg.charAt(0) === "-" && arg.length > 1) {
+ var at = arg.indexOf('=')
+ if (at > -1) {
+ hadEq = true
+ var v = arg.substr(at + 1)
+ arg = arg.substr(0, at)
+ args.splice(i, 1, arg, v)
+ }
+
+ // see if it's a shorthand
+ // if so, splice and back up to re-parse it.
+ var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs)
+ debug("arg=%j shRes=%j", arg, shRes)
+ if (shRes) {
+ debug(arg, shRes)
+ args.splice.apply(args, [i, 1].concat(shRes))
+ if (arg !== shRes[0]) {
+ i --
+ continue
+ }
+ }
+ arg = arg.replace(/^-+/, "")
+ var no = null
+ while (arg.toLowerCase().indexOf("no-") === 0) {
+ no = !no
+ arg = arg.substr(3)
+ }
+
+ if (abbrevs[arg]) arg = abbrevs[arg]
+
+ var argType = types[arg]
+ var isTypeArray = Array.isArray(argType)
+ if (isTypeArray && argType.length === 1) {
+ isTypeArray = false
+ argType = argType[0]
+ }
+
+ var isArray = argType === Array ||
+ isTypeArray && argType.indexOf(Array) !== -1
+
+ // allow unknown things to be arrays if specified multiple times.
+ if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) {
+ if (!Array.isArray(data[arg]))
+ data[arg] = [data[arg]]
+ isArray = true
+ }
+
+ var val
+ , la = args[i + 1]
+
+ var isBool = typeof no === 'boolean' ||
+ argType === Boolean ||
+ isTypeArray && argType.indexOf(Boolean) !== -1 ||
+ (typeof argType === 'undefined' && !hadEq) ||
+ (la === "false" &&
+ (argType === null ||
+ isTypeArray && ~argType.indexOf(null)))
+
+ if (isBool) {
+ // just set and move along
+ val = !no
+ // however, also support --bool true or --bool false
+ if (la === "true" || la === "false") {
+ val = JSON.parse(la)
+ la = null
+ if (no) val = !val
+ i ++
+ }
+
+ // also support "foo":[Boolean, "bar"] and "--foo bar"
+ if (isTypeArray && la) {
+ if (~argType.indexOf(la)) {
+ // an explicit type
+ val = la
+ i ++
+ } else if ( la === "null" && ~argType.indexOf(null) ) {
+ // null allowed
+ val = null
+ i ++
+ } else if ( !la.match(/^-{2,}[^-]/) &&
+ !isNaN(la) &&
+ ~argType.indexOf(Number) ) {
+ // number
+ val = +la
+ i ++
+ } else if ( !la.match(/^-[^-]/) && ~argType.indexOf(String) ) {
+ // string
+ val = la
+ i ++
+ }
+ }
+
+ if (isArray) (data[arg] = data[arg] || []).push(val)
+ else data[arg] = val
+
+ continue
+ }
+
+ if (argType === String) {
+ if (la === undefined) {
+ la = ""
+ } else if (la.match(/^-{1,2}[^-]+/)) {
+ la = ""
+ i --
+ }
+ }
+
+ if (la && la.match(/^-{2,}$/)) {
+ la = undefined
+ i --
+ }
+
+ val = la === undefined ? true : la
+ if (isArray) (data[arg] = data[arg] || []).push(val)
+ else data[arg] = val
+
+ i ++
+ continue
+ }
+ remain.push(arg)
+ }
+}
+
+function resolveShort (arg, shorthands, shortAbbr, abbrevs) {
+ // handle single-char shorthands glommed together, like
+ // npm ls -glp, but only if there is one dash, and only if
+ // all of the chars are single-char shorthands, and it's
+ // not a match to some other abbrev.
+ arg = arg.replace(/^-+/, '')
+
+ // if it's an exact known option, then don't go any further
+ if (abbrevs[arg] === arg)
+ return null
+
+ // if it's an exact known shortopt, same deal
+ if (shorthands[arg]) {
+ // make it an array, if it's a list of words
+ if (shorthands[arg] && !Array.isArray(shorthands[arg]))
+ shorthands[arg] = shorthands[arg].split(/\s+/)
+
+ return shorthands[arg]
+ }
+
+ // first check to see if this arg is a set of single-char shorthands
+ var singles = shorthands.___singles
+ if (!singles) {
+ singles = Object.keys(shorthands).filter(function (s) {
+ return s.length === 1
+ }).reduce(function (l,r) {
+ l[r] = true
+ return l
+ }, {})
+ shorthands.___singles = singles
+ debug('shorthand singles', singles)
+ }
+
+ var chrs = arg.split("").filter(function (c) {
+ return singles[c]
+ })
+
+ if (chrs.join("") === arg) return chrs.map(function (c) {
+ return shorthands[c]
+ }).reduce(function (l, r) {
+ return l.concat(r)
+ }, [])
+
+
+ // if it's an arg abbrev, and not a literal shorthand, then prefer the arg
+ if (abbrevs[arg] && !shorthands[arg])
+ return null
+
+ // if it's an abbr for a shorthand, then use that
+ if (shortAbbr[arg])
+ arg = shortAbbr[arg]
+
+ // make it an array, if it's a list of words
+ if (shorthands[arg] && !Array.isArray(shorthands[arg]))
+ shorthands[arg] = shorthands[arg].split(/\s+/)
+
+ return shorthands[arg]
+}
+
+
+/***/ }),
+/* 402 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+// Approach:
+//
+// 1. Get the minimatch set
+// 2. For each pattern in the set, PROCESS(pattern, false)
+// 3. Store matches per-set, then uniq them
+//
+// PROCESS(pattern, inGlobStar)
+// Get the first [n] items from pattern that are all strings
+// Join these together. This is PREFIX.
+// If there is no more remaining, then stat(PREFIX) and
+// add to matches if it succeeds. END.
+//
+// If inGlobStar and PREFIX is symlink and points to dir
+// set ENTRIES = []
+// else readdir(PREFIX) as ENTRIES
+// If fail, END
+//
+// with ENTRIES
+// If pattern[n] is GLOBSTAR
+// // handle the case where the globstar match is empty
+// // by pruning it out, and testing the resulting pattern
+// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
+// // handle other cases.
+// for ENTRY in ENTRIES (not dotfiles)
+// // attach globstar + tail onto the entry
+// // Mark that this entry is a globstar match
+// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
+//
+// else // not globstar
+// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
+// Test ENTRY against pattern[n]
+// If fails, continue
+// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
+//
+// Caveat:
+// Cache all stats and readdirs results to minimize syscall. Since all
+// we ever care about is existence and directory-ness, we can just keep
+// `true` for files, and [children,...] for directories, or `false` for
+// things that don't exist.
+
+module.exports = glob
+
+var fs = __webpack_require__(747)
+var rp = __webpack_require__(302)
+var minimatch = __webpack_require__(93)
+var Minimatch = minimatch.Minimatch
+var inherits = __webpack_require__(689)
+var EE = __webpack_require__(614).EventEmitter
+var path = __webpack_require__(622)
+var assert = __webpack_require__(357)
+var isAbsolute = __webpack_require__(681)
+var globSync = __webpack_require__(544)
+var common = __webpack_require__(856)
+var alphasort = common.alphasort
+var alphasorti = common.alphasorti
+var setopts = common.setopts
+var ownProp = common.ownProp
+var inflight = __webpack_require__(674)
+var util = __webpack_require__(669)
+var childrenIgnored = common.childrenIgnored
+var isIgnored = common.isIgnored
+
+var once = __webpack_require__(49)
+
+function glob (pattern, options, cb) {
+ if (typeof options === 'function') cb = options, options = {}
+ if (!options) options = {}
+
+ if (options.sync) {
+ if (cb)
+ throw new TypeError('callback provided to sync glob')
+ return globSync(pattern, options)
+ }
+
+ return new Glob(pattern, options, cb)
+}
+
+glob.sync = globSync
+var GlobSync = glob.GlobSync = globSync.GlobSync
+
+// old api surface
+glob.glob = glob
+
+function extend (origin, add) {
+ if (add === null || typeof add !== 'object') {
+ return origin
+ }
+
+ var keys = Object.keys(add)
+ var i = keys.length
+ while (i--) {
+ origin[keys[i]] = add[keys[i]]
+ }
+ return origin
+}
+
+glob.hasMagic = function (pattern, options_) {
+ var options = extend({}, options_)
+ options.noprocess = true
+
+ var g = new Glob(pattern, options)
+ var set = g.minimatch.set
+
+ if (!pattern)
+ return false
+
+ if (set.length > 1)
+ return true
+
+ for (var j = 0; j < set[0].length; j++) {
+ if (typeof set[0][j] !== 'string')
+ return true
+ }
+
+ return false
+}
+
+glob.Glob = Glob
+inherits(Glob, EE)
+function Glob (pattern, options, cb) {
+ if (typeof options === 'function') {
+ cb = options
+ options = null
+ }
+
+ if (options && options.sync) {
+ if (cb)
+ throw new TypeError('callback provided to sync glob')
+ return new GlobSync(pattern, options)
+ }
+
+ if (!(this instanceof Glob))
+ return new Glob(pattern, options, cb)
+
+ setopts(this, pattern, options)
+ this._didRealPath = false
+
+ // process each pattern in the minimatch set
+ var n = this.minimatch.set.length
+
+ // The matches are stored as {: true,...} so that
+ // duplicates are automagically pruned.
+ // Later, we do an Object.keys() on these.
+ // Keep them as a list so we can fill in when nonull is set.
+ this.matches = new Array(n)
+
+ if (typeof cb === 'function') {
+ cb = once(cb)
+ this.on('error', cb)
+ this.on('end', function (matches) {
+ cb(null, matches)
+ })
+ }
+
+ var self = this
+ this._processing = 0
+
+ this._emitQueue = []
+ this._processQueue = []
+ this.paused = false
+
+ if (this.noprocess)
+ return this
+
+ if (n === 0)
+ return done()
+
+ var sync = true
+ for (var i = 0; i < n; i ++) {
+ this._process(this.minimatch.set[i], i, false, done)
+ }
+ sync = false
+
+ function done () {
+ --self._processing
+ if (self._processing <= 0) {
+ if (sync) {
+ process.nextTick(function () {
+ self._finish()
+ })
+ } else {
+ self._finish()
+ }
+ }
+ }
+}
+
+Glob.prototype._finish = function () {
+ assert(this instanceof Glob)
+ if (this.aborted)
+ return
+
+ if (this.realpath && !this._didRealpath)
+ return this._realpath()
+
+ common.finish(this)
+ this.emit('end', this.found)
+}
+
+Glob.prototype._realpath = function () {
+ if (this._didRealpath)
+ return
+
+ this._didRealpath = true
+
+ var n = this.matches.length
+ if (n === 0)
+ return this._finish()
+
+ var self = this
+ for (var i = 0; i < this.matches.length; i++)
+ this._realpathSet(i, next)
+
+ function next () {
+ if (--n === 0)
+ self._finish()
+ }
+}
+
+Glob.prototype._realpathSet = function (index, cb) {
+ var matchset = this.matches[index]
+ if (!matchset)
+ return cb()
+
+ var found = Object.keys(matchset)
+ var self = this
+ var n = found.length
+
+ if (n === 0)
+ return cb()
+
+ var set = this.matches[index] = Object.create(null)
+ found.forEach(function (p, i) {
+ // If there's a problem with the stat, then it means that
+ // one or more of the links in the realpath couldn't be
+ // resolved. just return the abs value in that case.
+ p = self._makeAbs(p)
+ rp.realpath(p, self.realpathCache, function (er, real) {
+ if (!er)
+ set[real] = true
+ else if (er.syscall === 'stat')
+ set[p] = true
+ else
+ self.emit('error', er) // srsly wtf right here
+
+ if (--n === 0) {
+ self.matches[index] = set
+ cb()
+ }
+ })
+ })
+}
+
+Glob.prototype._mark = function (p) {
+ return common.mark(this, p)
+}
+
+Glob.prototype._makeAbs = function (f) {
+ return common.makeAbs(this, f)
+}
+
+Glob.prototype.abort = function () {
+ this.aborted = true
+ this.emit('abort')
+}
+
+Glob.prototype.pause = function () {
+ if (!this.paused) {
+ this.paused = true
+ this.emit('pause')
+ }
+}
+
+Glob.prototype.resume = function () {
+ if (this.paused) {
+ this.emit('resume')
+ this.paused = false
+ if (this._emitQueue.length) {
+ var eq = this._emitQueue.slice(0)
+ this._emitQueue.length = 0
+ for (var i = 0; i < eq.length; i ++) {
+ var e = eq[i]
+ this._emitMatch(e[0], e[1])
+ }
+ }
+ if (this._processQueue.length) {
+ var pq = this._processQueue.slice(0)
+ this._processQueue.length = 0
+ for (var i = 0; i < pq.length; i ++) {
+ var p = pq[i]
+ this._processing--
+ this._process(p[0], p[1], p[2], p[3])
+ }
+ }
+ }
+}
+
+Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
+ assert(this instanceof Glob)
+ assert(typeof cb === 'function')
+
+ if (this.aborted)
+ return
+
+ this._processing++
+ if (this.paused) {
+ this._processQueue.push([pattern, index, inGlobStar, cb])
+ return
+ }
+
+ //console.error('PROCESS %d', this._processing, pattern)
+
+ // Get the first [n] parts of pattern that are all strings.
+ var n = 0
+ while (typeof pattern[n] === 'string') {
+ n ++
+ }
+ // now n is the index of the first one that is *not* a string.
+
+ // see if there's anything else
+ var prefix
+ switch (n) {
+ // if not, then this is rather simple
+ case pattern.length:
+ this._processSimple(pattern.join('/'), index, cb)
+ return
+
+ case 0:
+ // pattern *starts* with some non-trivial item.
+ // going to readdir(cwd), but not include the prefix in matches.
+ prefix = null
+ break
+
+ default:
+ // pattern has some string bits in the front.
+ // whatever it starts with, whether that's 'absolute' like /foo/bar,
+ // or 'relative' like '../baz'
+ prefix = pattern.slice(0, n).join('/')
+ break
+ }
+
+ var remain = pattern.slice(n)
+
+ // get the list of entries.
+ var read
+ if (prefix === null)
+ read = '.'
+ else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
+ if (!prefix || !isAbsolute(prefix))
+ prefix = '/' + prefix
+ read = prefix
+ } else
+ read = prefix
+
+ var abs = this._makeAbs(read)
+
+ //if ignored, skip _processing
+ if (childrenIgnored(this, read))
+ return cb()
+
+ var isGlobStar = remain[0] === minimatch.GLOBSTAR
+ if (isGlobStar)
+ this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
+ else
+ this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
+}
+
+Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
+ var self = this
+ this._readdir(abs, inGlobStar, function (er, entries) {
+ return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
+ })
+}
+
+Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
+
+ // if the abs isn't a dir, then nothing can match!
+ if (!entries)
+ return cb()
+
+ // It will only match dot entries if it starts with a dot, or if
+ // dot is set. Stuff like @(.foo|.bar) isn't allowed.
+ var pn = remain[0]
+ var negate = !!this.minimatch.negate
+ var rawGlob = pn._glob
+ var dotOk = this.dot || rawGlob.charAt(0) === '.'
+
+ var matchedEntries = []
+ for (var i = 0; i < entries.length; i++) {
+ var e = entries[i]
+ if (e.charAt(0) !== '.' || dotOk) {
+ var m
+ if (negate && !prefix) {
+ m = !e.match(pn)
+ } else {
+ m = e.match(pn)
+ }
+ if (m)
+ matchedEntries.push(e)
+ }
+ }
+
+ //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
+
+ var len = matchedEntries.length
+ // If there are no matched entries, then nothing matches.
+ if (len === 0)
+ return cb()
+
+ // if this is the last remaining pattern bit, then no need for
+ // an additional stat *unless* the user has specified mark or
+ // stat explicitly. We know they exist, since readdir returned
+ // them.
+
+ if (remain.length === 1 && !this.mark && !this.stat) {
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ if (prefix) {
+ if (prefix !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+
+ if (e.charAt(0) === '/' && !this.nomount) {
+ e = path.join(this.root, e)
+ }
+ this._emitMatch(index, e)
+ }
+ // This was the last one, and no stats were needed
+ return cb()
+ }
+
+ // now test all matched entries as stand-ins for that part
+ // of the pattern.
+ remain.shift()
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ var newPattern
+ if (prefix) {
+ if (prefix !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+ this._process([e].concat(remain), index, inGlobStar, cb)
+ }
+ cb()
+}
+
+Glob.prototype._emitMatch = function (index, e) {
+ if (this.aborted)
+ return
+
+ if (isIgnored(this, e))
+ return
+
+ if (this.paused) {
+ this._emitQueue.push([index, e])
+ return
+ }
+
+ var abs = isAbsolute(e) ? e : this._makeAbs(e)
+
+ if (this.mark)
+ e = this._mark(e)
+
+ if (this.absolute)
+ e = abs
+
+ if (this.matches[index][e])
+ return
+
+ if (this.nodir) {
+ var c = this.cache[abs]
+ if (c === 'DIR' || Array.isArray(c))
+ return
+ }
+
+ this.matches[index][e] = true
+
+ var st = this.statCache[abs]
+ if (st)
+ this.emit('stat', e, st)
+
+ this.emit('match', e)
+}
+
+Glob.prototype._readdirInGlobStar = function (abs, cb) {
+ if (this.aborted)
+ return
+
+ // follow all symlinked directories forever
+ // just proceed as if this is a non-globstar situation
+ if (this.follow)
+ return this._readdir(abs, false, cb)
+
+ var lstatkey = 'lstat\0' + abs
+ var self = this
+ var lstatcb = inflight(lstatkey, lstatcb_)
+
+ if (lstatcb)
+ fs.lstat(abs, lstatcb)
+
+ function lstatcb_ (er, lstat) {
+ if (er && er.code === 'ENOENT')
+ return cb()
+
+ var isSym = lstat && lstat.isSymbolicLink()
+ self.symlinks[abs] = isSym
+
+ // If it's not a symlink or a dir, then it's definitely a regular file.
+ // don't bother doing a readdir in that case.
+ if (!isSym && lstat && !lstat.isDirectory()) {
+ self.cache[abs] = 'FILE'
+ cb()
+ } else
+ self._readdir(abs, false, cb)
+ }
+}
+
+Glob.prototype._readdir = function (abs, inGlobStar, cb) {
+ if (this.aborted)
+ return
+
+ cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
+ if (!cb)
+ return
+
+ //console.error('RD %j %j', +inGlobStar, abs)
+ if (inGlobStar && !ownProp(this.symlinks, abs))
+ return this._readdirInGlobStar(abs, cb)
+
+ if (ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+ if (!c || c === 'FILE')
+ return cb()
+
+ if (Array.isArray(c))
+ return cb(null, c)
+ }
+
+ var self = this
+ fs.readdir(abs, readdirCb(this, abs, cb))
+}
+
+function readdirCb (self, abs, cb) {
+ return function (er, entries) {
+ if (er)
+ self._readdirError(abs, er, cb)
+ else
+ self._readdirEntries(abs, entries, cb)
+ }
+}
+
+Glob.prototype._readdirEntries = function (abs, entries, cb) {
+ if (this.aborted)
+ return
+
+ // if we haven't asked to stat everything, then just
+ // assume that everything in there exists, so we can avoid
+ // having to stat it a second time.
+ if (!this.mark && !this.stat) {
+ for (var i = 0; i < entries.length; i ++) {
+ var e = entries[i]
+ if (abs === '/')
+ e = abs + e
+ else
+ e = abs + '/' + e
+ this.cache[e] = true
+ }
+ }
+
+ this.cache[abs] = entries
+ return cb(null, entries)
+}
+
+Glob.prototype._readdirError = function (f, er, cb) {
+ if (this.aborted)
+ return
+
+ // handle errors, and cache the information
+ switch (er.code) {
+ case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
+ case 'ENOTDIR': // totally normal. means it *does* exist.
+ var abs = this._makeAbs(f)
+ this.cache[abs] = 'FILE'
+ if (abs === this.cwdAbs) {
+ var error = new Error(er.code + ' invalid cwd ' + this.cwd)
+ error.path = this.cwd
+ error.code = er.code
+ this.emit('error', error)
+ this.abort()
+ }
+ break
+
+ case 'ENOENT': // not terribly unusual
+ case 'ELOOP':
+ case 'ENAMETOOLONG':
+ case 'UNKNOWN':
+ this.cache[this._makeAbs(f)] = false
+ break
+
+ default: // some unusual error. Treat as failure.
+ this.cache[this._makeAbs(f)] = false
+ if (this.strict) {
+ this.emit('error', er)
+ // If the error is handled, then we abort
+ // if not, we threw out of here
+ this.abort()
+ }
+ if (!this.silent)
+ console.error('glob error', er)
+ break
+ }
+
+ return cb()
+}
+
+Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
+ var self = this
+ this._readdir(abs, inGlobStar, function (er, entries) {
+ self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
+ })
+}
+
+
+Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
+ //console.error('pgs2', prefix, remain[0], entries)
+
+ // no entries means not a dir, so it can never have matches
+ // foo.txt/** doesn't match foo.txt
+ if (!entries)
+ return cb()
+
+ // test without the globstar, and with every child both below
+ // and replacing the globstar.
+ var remainWithoutGlobStar = remain.slice(1)
+ var gspref = prefix ? [ prefix ] : []
+ var noGlobStar = gspref.concat(remainWithoutGlobStar)
+
+ // the noGlobStar pattern exits the inGlobStar state
+ this._process(noGlobStar, index, false, cb)
+
+ var isSym = this.symlinks[abs]
+ var len = entries.length
+
+ // If it's a symlink, and we're in a globstar, then stop
+ if (isSym && inGlobStar)
+ return cb()
+
+ for (var i = 0; i < len; i++) {
+ var e = entries[i]
+ if (e.charAt(0) === '.' && !this.dot)
+ continue
+
+ // these two cases enter the inGlobStar state
+ var instead = gspref.concat(entries[i], remainWithoutGlobStar)
+ this._process(instead, index, true, cb)
+
+ var below = gspref.concat(entries[i], remain)
+ this._process(below, index, true, cb)
+ }
+
+ cb()
+}
+
+Glob.prototype._processSimple = function (prefix, index, cb) {
+ // XXX review this. Shouldn't it be doing the mounting etc
+ // before doing stat? kinda weird?
+ var self = this
+ this._stat(prefix, function (er, exists) {
+ self._processSimple2(prefix, index, er, exists, cb)
+ })
+}
+Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
+
+ //console.error('ps2', prefix, exists)
+
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ // If it doesn't exist, then just mark the lack of results
+ if (!exists)
+ return cb()
+
+ if (prefix && isAbsolute(prefix) && !this.nomount) {
+ var trail = /[\/\\]$/.test(prefix)
+ if (prefix.charAt(0) === '/') {
+ prefix = path.join(this.root, prefix)
+ } else {
+ prefix = path.resolve(this.root, prefix)
+ if (trail)
+ prefix += '/'
+ }
+ }
+
+ if (process.platform === 'win32')
+ prefix = prefix.replace(/\\/g, '/')
+
+ // Mark this as a match
+ this._emitMatch(index, prefix)
+ cb()
+}
+
+// Returns either 'DIR', 'FILE', or false
+Glob.prototype._stat = function (f, cb) {
+ var abs = this._makeAbs(f)
+ var needDir = f.slice(-1) === '/'
+
+ if (f.length > this.maxLength)
+ return cb()
+
+ if (!this.stat && ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+
+ if (Array.isArray(c))
+ c = 'DIR'
+
+ // It exists, but maybe not how we need it
+ if (!needDir || c === 'DIR')
+ return cb(null, c)
+
+ if (needDir && c === 'FILE')
+ return cb()
+
+ // otherwise we have to stat, because maybe c=true
+ // if we know it exists, but not what it is.
+ }
+
+ var exists
+ var stat = this.statCache[abs]
+ if (stat !== undefined) {
+ if (stat === false)
+ return cb(null, stat)
+ else {
+ var type = stat.isDirectory() ? 'DIR' : 'FILE'
+ if (needDir && type === 'FILE')
+ return cb()
+ else
+ return cb(null, type, stat)
+ }
+ }
+
+ var self = this
+ var statcb = inflight('stat\0' + abs, lstatcb_)
+ if (statcb)
+ fs.lstat(abs, statcb)
+
+ function lstatcb_ (er, lstat) {
+ if (lstat && lstat.isSymbolicLink()) {
+ // If it's a symlink, then treat it as the target, unless
+ // the target does not exist, then treat it as a file.
+ return fs.stat(abs, function (er, stat) {
+ if (er)
+ self._stat2(f, abs, null, lstat, cb)
+ else
+ self._stat2(f, abs, er, stat, cb)
+ })
+ } else {
+ self._stat2(f, abs, er, lstat, cb)
+ }
+ }
+}
+
+Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
+ if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
+ this.statCache[abs] = false
+ return cb()
+ }
+
+ var needDir = f.slice(-1) === '/'
+ this.statCache[abs] = stat
+
+ if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
+ return cb(null, false, stat)
+
+ var c = true
+ if (stat)
+ c = stat.isDirectory() ? 'DIR' : 'FILE'
+ this.cache[abs] = this.cache[abs] || c
+
+ if (needDir && c === 'FILE')
+ return cb()
+
+ return cb(null, c, stat)
+}
+
+
+/***/ }),
+/* 403 */,
+/* 404 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const Range = __webpack_require__(124)
+const { ANY } = __webpack_require__(174)
+const satisfies = __webpack_require__(310)
+const compare = __webpack_require__(874)
+
+// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff:
+// - Every simple range `r1, r2, ...` is a subset of some `R1, R2, ...`
+//
+// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff:
+// - If c is only the ANY comparator
+// - If C is only the ANY comparator, return true
+// - Else return false
+// - Let EQ be the set of = comparators in c
+// - If EQ is more than one, return true (null set)
+// - Let GT be the highest > or >= comparator in c
+// - Let LT be the lowest < or <= comparator in c
+// - If GT and LT, and GT.semver > LT.semver, return true (null set)
+// - If EQ
+// - If GT, and EQ does not satisfy GT, return true (null set)
+// - If LT, and EQ does not satisfy LT, return true (null set)
+// - If EQ satisfies every C, return true
+// - Else return false
+// - If GT
+// - If GT.semver is lower than any > or >= comp in C, return false
+// - If GT is >=, and GT.semver does not satisfy every C, return false
+// - If LT
+// - If LT.semver is greater than any < or <= comp in C, return false
+// - If LT is <=, and LT.semver does not satisfy every C, return false
+// - If any C is a = range, and GT or LT are set, return false
+// - Else return true
+
+const subset = (sub, dom, options) => {
+ if (sub === dom)
+ return true
+
+ sub = new Range(sub, options)
+ dom = new Range(dom, options)
+ let sawNonNull = false
+
+ OUTER: for (const simpleSub of sub.set) {
+ for (const simpleDom of dom.set) {
+ const isSub = simpleSubset(simpleSub, simpleDom, options)
+ sawNonNull = sawNonNull || isSub !== null
+ if (isSub)
+ continue OUTER
+ }
+ // the null set is a subset of everything, but null simple ranges in
+ // a complex range should be ignored. so if we saw a non-null range,
+ // then we know this isn't a subset, but if EVERY simple range was null,
+ // then it is a subset.
+ if (sawNonNull)
+ return false
+ }
+ return true
+}
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- *
- * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value
- * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the
- * values are set, this should be serialized with toString and set as the permissions field on an
- * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but
- * the order of the permissions is particular and this class guarantees correctness.
- *
- * @export
- * @class AccountSASPermissions
- */
-var AccountSASPermissions = /** @class */ (function () {
- function AccountSASPermissions() {
- /**
- * Permission to read resources and list queues and tables granted.
- *
- * @type {boolean}
- * @memberof AccountSASPermissions
- */
- this.read = false;
- /**
- * Permission to write resources granted.
- *
- * @type {boolean}
- * @memberof AccountSASPermissions
- */
- this.write = false;
- /**
- * Permission to create blobs and files granted.
- *
- * @type {boolean}
- * @memberof AccountSASPermissions
- */
- this.delete = false;
- /**
- * Permission to delete versions granted.
- *
- * @type {boolean}
- * @memberof AccountSASPermissions
- */
- this.deleteVersion = false;
- /**
- * Permission to list blob containers, blobs, shares, directories, and files granted.
- *
- * @type {boolean}
- * @memberof AccountSASPermissions
- */
- this.list = false;
- /**
- * Permission to add messages, table entities, and append to blobs granted.
- *
- * @type {boolean}
- * @memberof AccountSASPermissions
- */
- this.add = false;
- /**
- * Permission to create blobs and files granted.
- *
- * @type {boolean}
- * @memberof AccountSASPermissions
- */
- this.create = false;
- /**
- * Permissions to update messages and table entities granted.
- *
- * @type {boolean}
- * @memberof AccountSASPermissions
- */
- this.update = false;
- /**
- * Permission to get and delete messages granted.
- *
- * @type {boolean}
- * @memberof AccountSASPermissions
- */
- this.process = false;
- /**
- * Specfies Tag access granted.
- *
- * @type {boolean}
- * @memberof AccountSASPermissions
- */
- this.tag = false;
- /**
- * Permission to filter blobs.
- *
- * @type {boolean}
- * @memberof AccountSASPermissions
- */
- this.filter = false;
+const simpleSubset = (sub, dom, options) => {
+ if (sub === dom)
+ return true
+
+ if (sub.length === 1 && sub[0].semver === ANY)
+ return dom.length === 1 && dom[0].semver === ANY
+
+ const eqSet = new Set()
+ let gt, lt
+ for (const c of sub) {
+ if (c.operator === '>' || c.operator === '>=')
+ gt = higherGT(gt, c, options)
+ else if (c.operator === '<' || c.operator === '<=')
+ lt = lowerLT(lt, c, options)
+ else
+ eqSet.add(c.semver)
+ }
+
+ if (eqSet.size > 1)
+ return null
+
+ let gtltComp
+ if (gt && lt) {
+ gtltComp = compare(gt.semver, lt.semver, options)
+ if (gtltComp > 0)
+ return null
+ else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<='))
+ return null
+ }
+
+ // will iterate one or zero times
+ for (const eq of eqSet) {
+ if (gt && !satisfies(eq, String(gt), options))
+ return null
+
+ if (lt && !satisfies(eq, String(lt), options))
+ return null
+
+ for (const c of dom) {
+ if (!satisfies(eq, String(c), options))
+ return false
}
- /**
- * Parse initializes the AccountSASPermissions fields from a string.
- *
- * @static
- * @param {string} permissions
- * @returns {AccountSASPermissions}
- * @memberof AccountSASPermissions
- */
- AccountSASPermissions.parse = function (permissions) {
- var accountSASPermissions = new AccountSASPermissions();
- for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) {
- var c = permissions_1[_i];
- switch (c) {
- case "r":
- accountSASPermissions.read = true;
- break;
- case "w":
- accountSASPermissions.write = true;
- break;
- case "d":
- accountSASPermissions.delete = true;
- break;
- case "x":
- accountSASPermissions.deleteVersion = true;
- break;
- case "l":
- accountSASPermissions.list = true;
- break;
- case "a":
- accountSASPermissions.add = true;
- break;
- case "c":
- accountSASPermissions.create = true;
- break;
- case "u":
- accountSASPermissions.update = true;
- break;
- case "p":
- accountSASPermissions.process = true;
- break;
- case "t":
- accountSASPermissions.tag = true;
- break;
- case "f":
- accountSASPermissions.filter = true;
- break;
- default:
- throw new RangeError("Invalid permission character: " + c);
- }
- }
- return accountSASPermissions;
- };
- /**
- * Produces the SAS permissions string for an Azure Storage account.
- * Call this method to set AccountSASSignatureValues Permissions field.
- *
- * Using this method will guarantee the resource types are in
- * an order accepted by the service.
- *
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
- *
- * @returns {string}
- * @memberof AccountSASPermissions
- */
- AccountSASPermissions.prototype.toString = function () {
- // The order of the characters should be as specified here to ensure correctness:
- // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
- // Use a string array instead of string concatenating += operator for performance
- var permissions = [];
- if (this.read) {
- permissions.push("r");
- }
- if (this.write) {
- permissions.push("w");
- }
- if (this.delete) {
- permissions.push("d");
- }
- if (this.deleteVersion) {
- permissions.push("x");
- }
- if (this.filter) {
- permissions.push("f");
- }
- if (this.tag) {
- permissions.push("t");
- }
- if (this.list) {
- permissions.push("l");
- }
- if (this.add) {
- permissions.push("a");
- }
- if (this.create) {
- permissions.push("c");
- }
- if (this.update) {
- permissions.push("u");
- }
- if (this.process) {
- permissions.push("p");
- }
- return permissions.join("");
- };
- return AccountSASPermissions;
-}());
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- *
- * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value
- * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the
- * values are set, this should be serialized with toString and set as the resources field on an
- * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but
- * the order of the resources is particular and this class guarantees correctness.
- *
- * @export
- * @class AccountSASResourceTypes
- */
-var AccountSASResourceTypes = /** @class */ (function () {
- function AccountSASResourceTypes() {
- /**
- * Permission to access service level APIs granted.
- *
- * @type {boolean}
- * @memberof AccountSASResourceTypes
- */
- this.service = false;
- /**
- * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted.
- *
- * @type {boolean}
- * @memberof AccountSASResourceTypes
- */
- this.container = false;
- /**
- * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted.
- *
- * @type {boolean}
- * @memberof AccountSASResourceTypes
- */
- this.object = false;
+ return true
+ }
+
+ let higher, lower
+ let hasDomLT, hasDomGT
+ for (const c of dom) {
+ hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>='
+ hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<='
+ if (gt) {
+ if (c.operator === '>' || c.operator === '>=') {
+ higher = higherGT(gt, c, options)
+ if (higher === c && higher !== gt)
+ return false
+ } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options))
+ return false
}
- /**
- * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an
- * Error if it encounters a character that does not correspond to a valid resource type.
- *
- * @static
- * @param {string} resourceTypes
- * @returns {AccountSASResourceTypes}
- * @memberof AccountSASResourceTypes
- */
- AccountSASResourceTypes.parse = function (resourceTypes) {
- var accountSASResourceTypes = new AccountSASResourceTypes();
- for (var _i = 0, resourceTypes_1 = resourceTypes; _i < resourceTypes_1.length; _i++) {
- var c = resourceTypes_1[_i];
- switch (c) {
- case "s":
- accountSASResourceTypes.service = true;
- break;
- case "c":
- accountSASResourceTypes.container = true;
- break;
- case "o":
- accountSASResourceTypes.object = true;
- break;
- default:
- throw new RangeError("Invalid resource type: " + c);
- }
- }
- return accountSASResourceTypes;
- };
- /**
- * Converts the given resource types to a string.
- *
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
- *
- * @returns {string}
- * @memberof AccountSASResourceTypes
- */
- AccountSASResourceTypes.prototype.toString = function () {
- var resourceTypes = [];
- if (this.service) {
- resourceTypes.push("s");
- }
- if (this.container) {
- resourceTypes.push("c");
- }
- if (this.object) {
- resourceTypes.push("o");
- }
- return resourceTypes.join("");
- };
- return AccountSASResourceTypes;
-}());
+ if (lt) {
+ if (c.operator === '<' || c.operator === '<=') {
+ lower = lowerLT(lt, c, options)
+ if (lower === c && lower !== lt)
+ return false
+ } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options))
+ return false
+ }
+ if (!c.operator && (lt || gt) && gtltComp !== 0)
+ return false
+ }
+
+ // if there was a < or >, and nothing in the dom, then must be false
+ // UNLESS it was limited by another range in the other direction.
+ // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0
+ if (gt && hasDomLT && !lt && gtltComp !== 0)
+ return false
+
+ if (lt && hasDomGT && !gt && gtltComp !== 0)
+ return false
+
+ return true
+}
+
+// >=1.2.3 is lower than >1.2.3
+const higherGT = (a, b, options) => {
+ if (!a)
+ return b
+ const comp = compare(a.semver, b.semver, options)
+ return comp > 0 ? a
+ : comp < 0 ? b
+ : b.operator === '>' && a.operator === '>=' ? b
+ : a
+}
+
+// <=1.2.3 is higher than <1.2.3
+const lowerLT = (a, b, options) => {
+ if (!a)
+ return b
+ const comp = compare(a.semver, b.semver, options)
+ return comp < 0 ? a
+ : comp > 0 ? b
+ : b.operator === '<' && a.operator === '<=' ? b
+ : a
+}
+
+module.exports = subset
+
+
+/***/ }),
+/* 405 */,
+/* 406 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+// On windows, create a .cmd file.
+// Read the #! in the file to see what it uses. The vast majority
+// of the time, this will be either:
+// "#!/usr/bin/env "
+// or:
+// "#! "
+//
+// Write a binroot/pkg.bin + ".cmd" file that has this line in it:
+// @ %dp0% %*
+
+module.exports = cmdShim
+cmdShim.ifExists = cmdShimIfExists
+
+var fs = __webpack_require__(598)
+
+var mkdir = __webpack_require__(626)
+ , path = __webpack_require__(622)
+ , toBatchSyntax = __webpack_require__(513)
+ , shebangExpr = /^#\!\s*(?:\/usr\/bin\/env)?\s*([^ \t]+=[^ \t]+\s+)*\s*([^ \t]+)(.*)$/
+
+function cmdShimIfExists (from, to, cb) {
+ fs.stat(from, function (er) {
+ if (er) return cb()
+ cmdShim(from, to, cb)
+ })
+}
+
+// Try to unlink, but ignore errors.
+// Any problems will surface later.
+function rm (path, cb) {
+ fs.unlink(path, function(er) {
+ cb()
+ })
+}
+
+function cmdShim (from, to, cb) {
+ fs.stat(from, function (er, stat) {
+ if (er)
+ return cb(er)
+
+ cmdShim_(from, to, cb)
+ })
+}
+
+function cmdShim_ (from, to, cb) {
+ var then = times(3, next, cb)
+ rm(to, then)
+ rm(to + ".cmd", then)
+ rm(to + ".ps1", then)
+
+ function next(er) {
+ writeShim(from, to, cb)
+ }
+}
+
+function writeShim (from, to, cb) {
+ // make a cmd file and a sh script
+ // First, check if the bin is a #! of some sort.
+ // If not, then assume it's something that'll be compiled, or some other
+ // sort of script, and just call it directly.
+ mkdir(path.dirname(to), function (er) {
+ if (er)
+ return cb(er)
+ fs.readFile(from, "utf8", function (er, data) {
+ if (er) return writeShim_(from, to, null, null, null, cb)
+ var firstLine = data.trim().split(/\r*\n/)[0]
+ , shebang = firstLine.match(shebangExpr)
+ if (!shebang) return writeShim_(from, to, null, null, null, cb)
+ var vars = shebang[1] || ""
+ , prog = shebang[2]
+ , args = shebang[3] || ""
+ return writeShim_(from, to, prog, args, vars, cb)
+ })
+ })
+}
+
+
+function writeShim_ (from, to, prog, args, variables, cb) {
+ var shTarget = path.relative(path.dirname(to), from)
+ , target = shTarget.split("/").join("\\")
+ , longProg
+ , shProg = prog && prog.split("\\").join("/")
+ , shLongProg
+ , pwshProg = shProg && "\"" + shProg + "$exe\""
+ , pwshLongProg
+ shTarget = shTarget.split("\\").join("/")
+ args = args || ""
+ variables = variables || ""
+ if (!prog) {
+ prog = "\"%dp0%\\" + target + "\""
+ shProg = "\"$basedir/" + shTarget + "\""
+ pwshProg = shProg
+ args = ""
+ target = ""
+ shTarget = ""
+ } else {
+ longProg = "\"%dp0%\\" + prog + ".exe\""
+ shLongProg = "\"$basedir/" + prog + "\""
+ pwshLongProg = "\"$basedir/" + prog + "$exe\""
+ target = "\"%dp0%\\" + target + "\""
+ shTarget = "\"$basedir/" + shTarget + "\""
+ }
+
+ // @SETLOCAL
+ // @CALL :find_dp0
+ //
+ // @IF EXIST "%dp0%\node.exe" (
+ // @SET "_prog=%dp0%\node.exe"
+ // ) ELSE (
+ // @SET "_prog=node"
+ // @SET PATHEXT=%PATHEXT:;.JS;=;%
+ // )
+ //
+ // "%_prog%" "%dp0%\.\node_modules\npm\bin\npm-cli.js" %*
+ // @ENDLOCAL
+ // @EXIT /b %errorlevel%
+ //
+ // :find_dp0
+ // SET dp0=%~dp0
+ // EXIT /b
+ //
+ // Subroutine trick to fix https://github.com/npm/cmd-shim/issues/10
+ var head = '@ECHO off\r\n' +
+ 'SETLOCAL\r\n' +
+ 'CALL :find_dp0\r\n'
+ var foot = 'ENDLOCAL\r\n' +
+ 'EXIT /b %errorlevel%\r\n' +
+ ':find_dp0\r\n' +
+ 'SET dp0=%~dp0\r\n' +
+ 'EXIT /b\r\n'
+
+ var cmd
+ if (longProg) {
+ shLongProg = shLongProg.trim();
+ args = args.trim();
+ var variableDeclarationsAsBatch = toBatchSyntax.convertToSetCommands(variables)
+ cmd = head
+ + variableDeclarationsAsBatch
+ + "\r\n"
+ + "IF EXIST " + longProg + " (\r\n"
+ + " SET \"_prog=" + longProg.replace(/(^")|("$)/g, '') + "\"\r\n"
+ + ") ELSE (\r\n"
+ + " SET \"_prog=" + prog.replace(/(^")|("$)/g, '') + "\"\r\n"
+ + " SET PATHEXT=%PATHEXT:;.JS;=;%\r\n"
+ + ")\r\n"
+ + "\r\n"
+ + "\"%_prog%\" " + args + " " + target + " %*\r\n"
+ + foot
+ } else {
+ cmd = head + prog + " " + args + " " + target + " %*\r\n" + foot
+ }
+
+ // #!/bin/sh
+ // basedir=`dirname "$0"`
+ //
+ // case `uname` in
+ // *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
+ // esac
+ //
+ // if [ -x "$basedir/node.exe" ]; then
+ // "$basedir/node.exe" "$basedir/node_modules/npm/bin/npm-cli.js" "$@"
+ // ret=$?
+ // else
+ // node "$basedir/node_modules/npm/bin/npm-cli.js" "$@"
+ // ret=$?
+ // fi
+ // exit $ret
+
+ var sh = "#!/bin/sh\n"
+
+ sh = sh
+ + "basedir=$(dirname \"$(echo \"$0\" | sed -e 's,\\\\,/,g')\")\n"
+ + "\n"
+ + "case `uname` in\n"
+ + " *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w \"$basedir\"`;;\n"
+ + "esac\n"
+ + "\n"
+
+ if (shLongProg) {
+ sh = sh
+ + "if [ -x "+shLongProg+" ]; then\n"
+ + " " + variables + shLongProg + " " + args + " " + shTarget + " \"$@\"\n"
+ + " ret=$?\n"
+ + "else \n"
+ + " " + variables + shProg + " " + args + " " + shTarget + " \"$@\"\n"
+ + " ret=$?\n"
+ + "fi\n"
+ + "exit $ret\n"
+ } else {
+ sh = sh
+ + shProg + " " + args + " " + shTarget + " \"$@\"\n"
+ + "exit $?\n"
+ }
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- *
- * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value
- * to true means that any SAS which uses these permissions will grant access to that service. Once all the
- * values are set, this should be serialized with toString and set as the services field on an
- * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but
- * the order of the services is particular and this class guarantees correctness.
- *
- * @export
- * @class AccountSASServices
- */
-var AccountSASServices = /** @class */ (function () {
- function AccountSASServices() {
- /**
- * Permission to access blob resources granted.
- *
- * @type {boolean}
- * @memberof AccountSASServices
- */
- this.blob = false;
- /**
- * Permission to access file resources granted.
- *
- * @type {boolean}
- * @memberof AccountSASServices
- */
- this.file = false;
- /**
- * Permission to access queue resources granted.
- *
- * @type {boolean}
- * @memberof AccountSASServices
- */
- this.queue = false;
- /**
- * Permission to access table resources granted.
- *
- * @type {boolean}
- * @memberof AccountSASServices
- */
- this.table = false;
- }
- /**
- * Creates an {@link AccountSASServices} from the specified services string. This method will throw an
- * Error if it encounters a character that does not correspond to a valid service.
- *
- * @static
- * @param {string} services
- * @returns {AccountSASServices}
- * @memberof AccountSASServices
- */
- AccountSASServices.parse = function (services) {
- var accountSASServices = new AccountSASServices();
- for (var _i = 0, services_1 = services; _i < services_1.length; _i++) {
- var c = services_1[_i];
- switch (c) {
- case "b":
- accountSASServices.blob = true;
- break;
- case "f":
- accountSASServices.file = true;
- break;
- case "q":
- accountSASServices.queue = true;
- break;
- case "t":
- accountSASServices.table = true;
- break;
- default:
- throw new RangeError("Invalid service character: " + c);
- }
- }
- return accountSASServices;
- };
- /**
- * Converts the given services to a string.
- *
- * @returns {string}
- * @memberof AccountSASServices
- */
- AccountSASServices.prototype.toString = function () {
- var services = [];
- if (this.blob) {
- services.push("b");
- }
- if (this.table) {
- services.push("t");
- }
- if (this.queue) {
- services.push("q");
- }
- if (this.file) {
- services.push("f");
- }
- return services.join("");
- };
- return AccountSASServices;
-}());
+ // #!/usr/bin/env pwsh
+ // $basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
+ //
+ // $ret=0
+ // $exe = ""
+ // if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
+ // # Fix case when both the Windows and Linux builds of Node
+ // # are installed in the same directory
+ // $exe = ".exe"
+ // }
+ // if (Test-Path "$basedir/node") {
+ // & "$basedir/node$exe" "$basedir/node_modules/npm/bin/npm-cli.js" $args
+ // $ret=$LASTEXITCODE
+ // } else {
+ // & "node$exe" "$basedir/node_modules/npm/bin/npm-cli.js" $args
+ // $ret=$LASTEXITCODE
+ // }
+ // exit $ret
+ var pwsh = "#!/usr/bin/env pwsh\n"
+ + "$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent\n"
+ + "\n"
+ + "$exe=\"\"\n"
+ + "if ($PSVersionTable.PSVersion -lt \"6.0\" -or $IsWindows) {\n"
+ + " # Fix case when both the Windows and Linux builds of Node\n"
+ + " # are installed in the same directory\n"
+ + " $exe=\".exe\"\n"
+ + "}\n"
+ if (shLongProg) {
+ pwsh = pwsh
+ + "$ret=0\n"
+ + "if (Test-Path " + pwshLongProg + ") {\n"
+ + " & " + pwshLongProg + " " + args + " " + shTarget + " $args\n"
+ + " $ret=$LASTEXITCODE\n"
+ + "} else {\n"
+ + " & " + pwshProg + " " + args + " " + shTarget + " $args\n"
+ + " $ret=$LASTEXITCODE\n"
+ + "}\n"
+ + "exit $ret\n"
+ } else {
+ pwsh = pwsh
+ + "& " + pwshProg + " " + args + " " + shTarget + " $args\n"
+ + "exit $LASTEXITCODE\n"
+ }
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-/**
- * Generate SasIPRange format string. For example:
- *
- * "8.8.8.8" or "1.1.1.1-255.255.255.255"
- *
- * @export
- * @param {SasIPRange} ipRange
- * @returns {string}
- */
-function ipRangeToString(ipRange) {
- return ipRange.end ? ipRange.start + "-" + ipRange.end : ipRange.start;
+ var then = times(3, next, cb)
+ fs.writeFile(to + ".ps1", pwsh, "utf8", then)
+ fs.writeFile(to + ".cmd", cmd, "utf8", then)
+ fs.writeFile(to, sh, "utf8", then)
+ function next () {
+ chmodShim(to, cb)
+ }
}
-// Copyright (c) Microsoft Corporation. All rights reserved.
-(function (SASProtocol) {
- /**
- * Protocol that allows HTTPS only
- */
- SASProtocol["Https"] = "https";
- /**
- * Protocol that allows both HTTPS and HTTP
- */
- SASProtocol["HttpsAndHttp"] = "https,http";
-})(exports.SASProtocol || (exports.SASProtocol = {}));
-/**
- * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly
- * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues}
- * types. Once generated, it can be encoded into a {@code String} and appended to a URL directly (though caution should
- * be taken here in case there are existing query parameters, which might affect the appropriate means of appending
- * these query parameters).
- *
- * NOTE: Instances of this class are immutable.
- *
- * @export
- * @class SASQueryParameters
- */
-var SASQueryParameters = /** @class */ (function () {
- /**
- * Creates an instance of SASQueryParameters.
- *
- * @param {string} version Representing the storage version
- * @param {string} signature Representing the signature for the SAS token
- * @param {string} [permissions] Representing the storage permissions
- * @param {string} [services] Representing the storage services being accessed (only for Account SAS)
- * @param {string} [resourceTypes] Representing the storage resource types being accessed (only for Account SAS)
- * @param {SASProtocol} [protocol] Representing the allowed HTTP protocol(s)
- * @param {Date} [startsOn] Representing the start time for this SAS token
- * @param {Date} [expiresOn] Representing the expiry time for this SAS token
- * @param {SasIPRange} [ipRange] Representing the range of valid IP addresses for this SAS token
- * @param {string} [identifier] Representing the signed identifier (only for Service SAS)
- * @param {string} [resource] Representing the storage container or blob (only for Service SAS)
- * @param {string} [cacheControl] Representing the cache-control header (only for Blob/File Service SAS)
- * @param {string} [contentDisposition] Representing the content-disposition header (only for Blob/File Service SAS)
- * @param {string} [contentEncoding] Representing the content-encoding header (only for Blob/File Service SAS)
- * @param {string} [contentLanguage] Representing the content-language header (only for Blob/File Service SAS)
- * @param {string} [contentType] Representing the content-type header (only for Blob/File Service SAS)
- * @param {userDelegationKey} [userDelegationKey] Representing the user delegation key properties
- * @memberof SASQueryParameters
- */
- function SASQueryParameters(version, signature, permissions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey) {
- this.version = version;
- this.services = services;
- this.resourceTypes = resourceTypes;
- this.expiresOn = expiresOn;
- this.permissions = permissions;
- this.protocol = protocol;
- this.startsOn = startsOn;
- this.ipRangeInner = ipRange;
- this.identifier = identifier;
- this.resource = resource;
- this.signature = signature;
- this.cacheControl = cacheControl;
- this.contentDisposition = contentDisposition;
- this.contentEncoding = contentEncoding;
- this.contentLanguage = contentLanguage;
- this.contentType = contentType;
- if (userDelegationKey) {
- this.signedOid = userDelegationKey.signedObjectId;
- this.signedTenantId = userDelegationKey.signedTenantId;
- this.signedStartsOn = userDelegationKey.signedStartsOn;
- this.signedExpiresOn = userDelegationKey.signedExpiresOn;
- this.signedService = userDelegationKey.signedService;
- this.signedVersion = userDelegationKey.signedVersion;
- }
- }
- Object.defineProperty(SASQueryParameters.prototype, "ipRange", {
- /**
- * Optional. IP range allowed for this SAS.
- *
- * @readonly
- * @type {(SasIPRange | undefined)}
- * @memberof SASQueryParameters
- */
- get: function () {
- if (this.ipRangeInner) {
- return {
- end: this.ipRangeInner.end,
- start: this.ipRangeInner.start
- };
- }
- return undefined;
- },
- enumerable: false,
- configurable: true
- });
- /**
- * Encodes all SAS query parameters into a string that can be appended to a URL.
- *
- * @returns {string}
- * @memberof SASQueryParameters
- */
- SASQueryParameters.prototype.toString = function () {
- var params = [
- "sv",
- "ss",
- "srt",
- "spr",
- "st",
- "se",
- "sip",
- "si",
- "skoid",
- "sktid",
- "skt",
- "ske",
- "sks",
- "skv",
- "sr",
- "sp",
- "sig",
- "rscc",
- "rscd",
- "rsce",
- "rscl",
- "rsct"
- ];
- var queries = [];
- for (var _i = 0, params_1 = params; _i < params_1.length; _i++) {
- var param = params_1[_i];
- switch (param) {
- case "sv":
- this.tryAppendQueryParameter(queries, param, this.version);
- break;
- case "ss":
- this.tryAppendQueryParameter(queries, param, this.services);
- break;
- case "srt":
- this.tryAppendQueryParameter(queries, param, this.resourceTypes);
- break;
- case "spr":
- this.tryAppendQueryParameter(queries, param, this.protocol);
- break;
- case "st":
- this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined);
- break;
- case "se":
- this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined);
- break;
- case "sip":
- this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined);
- break;
- case "si":
- this.tryAppendQueryParameter(queries, param, this.identifier);
- break;
- case "skoid": // Signed object ID
- this.tryAppendQueryParameter(queries, param, this.signedOid);
- break;
- case "sktid": // Signed tenant ID
- this.tryAppendQueryParameter(queries, param, this.signedTenantId);
- break;
- case "skt": // Signed key start time
- this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined);
- break;
- case "ske": // Signed key expiry time
- this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined);
- break;
- case "sks": // Signed key service
- this.tryAppendQueryParameter(queries, param, this.signedService);
- break;
- case "skv": // Signed key version
- this.tryAppendQueryParameter(queries, param, this.signedVersion);
- break;
- case "sr":
- this.tryAppendQueryParameter(queries, param, this.resource);
- break;
- case "sp":
- this.tryAppendQueryParameter(queries, param, this.permissions);
- break;
- case "sig":
- this.tryAppendQueryParameter(queries, param, this.signature);
- break;
- case "rscc":
- this.tryAppendQueryParameter(queries, param, this.cacheControl);
- break;
- case "rscd":
- this.tryAppendQueryParameter(queries, param, this.contentDisposition);
- break;
- case "rsce":
- this.tryAppendQueryParameter(queries, param, this.contentEncoding);
- break;
- case "rscl":
- this.tryAppendQueryParameter(queries, param, this.contentLanguage);
- break;
- case "rsct":
- this.tryAppendQueryParameter(queries, param, this.contentType);
- break;
- }
- }
- return queries.join("&");
- };
- /**
- * A private helper method used to filter and append query key/value pairs into an array.
- *
- * @private
- * @param {string[]} queries
- * @param {string} key
- * @param {string} [value]
- * @returns {void}
- * @memberof SASQueryParameters
- */
- SASQueryParameters.prototype.tryAppendQueryParameter = function (queries, key, value) {
- if (!value) {
- return;
- }
- key = encodeURIComponent(key);
- value = encodeURIComponent(value);
- if (key.length > 0 && value.length > 0) {
- queries.push(key + "=" + value);
- }
- };
- return SASQueryParameters;
-}());
+function chmodShim (to, cb) {
+ var then = times(3, cb, cb)
+ fs.chmod(to, "0755", then)
+ fs.chmod(to + ".cmd", "0755", then)
+ fs.chmod(to + ".ps1", "0755", then)
+}
-// Copyright (c) Microsoft Corporation. All rights reserved.
-/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- *
- * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual
- * REST request.
- *
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas
- *
- * @param {AccountSASSignatureValues} accountSASSignatureValues
- * @param {StorageSharedKeyCredential} sharedKeyCredential
- * @returns {SASQueryParameters}
- * @memberof AccountSASSignatureValues
- */
-function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) {
- var version = accountSASSignatureValues.version
- ? accountSASSignatureValues.version
- : SERVICE_VERSION;
- if (accountSASSignatureValues.permissions &&
- accountSASSignatureValues.permissions.deleteVersion &&
- version < "2019-10-10") {
- throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission.");
- }
- if (accountSASSignatureValues.permissions &&
- accountSASSignatureValues.permissions.tag &&
- version < "2019-12-12") {
- throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission.");
- }
- if (accountSASSignatureValues.permissions &&
- accountSASSignatureValues.permissions.filter &&
- version < "2019-12-12") {
- throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission.");
+function times(n, ok, cb) {
+ var errState = null
+ return function(er) {
+ if (!errState) {
+ if (er)
+ cb(errState = er)
+ else if (--n === 0)
+ ok()
}
- var parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString());
- var parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString();
- var parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString();
- var stringToSign = [
- sharedKeyCredential.accountName,
- parsedPermissions,
- parsedServices,
- parsedResourceTypes,
- accountSASSignatureValues.startsOn
- ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)
- : "",
- truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),
- accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "",
- accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "",
- version,
- "" // Account SAS requires an additional newline character
- ].join("\n");
- var signature = sharedKeyCredential.computeHMACSHA256(stringToSign);
- return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange);
+ }
+}
+
+
+/***/ }),
+/* 407 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const BB = __webpack_require__(900)
+
+const contentPath = __webpack_require__(969)
+const crypto = __webpack_require__(417)
+const figgyPudding = __webpack_require__(965)
+const fixOwner = __webpack_require__(133)
+const fs = __webpack_require__(598)
+const hashToSegments = __webpack_require__(271)
+const ms = __webpack_require__(371)
+const path = __webpack_require__(622)
+const ssri = __webpack_require__(951)
+const Y = __webpack_require__(945)
+
+const indexV = __webpack_require__(525)['cache-version'].index
+
+const appendFileAsync = BB.promisify(fs.appendFile)
+const readFileAsync = BB.promisify(fs.readFile)
+const readdirAsync = BB.promisify(fs.readdir)
+const concat = ms.concat
+const from = ms.from
+
+module.exports.NotFoundError = class NotFoundError extends Error {
+ constructor (cache, key) {
+ super(Y`No cache entry for \`${key}\` found in \`${cache}\``)
+ this.code = 'ENOENT'
+ this.cache = cache
+ this.key = key
+ }
+}
+
+const IndexOpts = figgyPudding({
+ metadata: {},
+ size: {}
+})
+
+module.exports.insert = insert
+function insert (cache, key, integrity, opts) {
+ opts = IndexOpts(opts)
+ const bucket = bucketPath(cache, key)
+ const entry = {
+ key,
+ integrity: integrity && ssri.stringify(integrity),
+ time: Date.now(),
+ size: opts.size,
+ metadata: opts.metadata
+ }
+ return fixOwner.mkdirfix(
+ cache, path.dirname(bucket)
+ ).then(() => {
+ const stringified = JSON.stringify(entry)
+ // NOTE - Cleverness ahoy!
+ //
+ // This works because it's tremendously unlikely for an entry to corrupt
+ // another while still preserving the string length of the JSON in
+ // question. So, we just slap the length in there and verify it on read.
+ //
+ // Thanks to @isaacs for the whiteboarding session that ended up with this.
+ return appendFileAsync(
+ bucket, `\n${hashEntry(stringified)}\t${stringified}`
+ )
+ }).then(
+ () => fixOwner.chownr(cache, bucket)
+ ).catch({ code: 'ENOENT' }, () => {
+ // There's a class of race conditions that happen when things get deleted
+ // during fixOwner, or between the two mkdirfix/chownr calls.
+ //
+ // It's perfectly fine to just not bother in those cases and lie
+ // that the index entry was written. Because it's a cache.
+ }).then(() => {
+ return formatEntry(cache, entry)
+ })
}
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- *
- * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting
- * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all
- * the values are set, this should be serialized with toString and set as the permissions field on a
- * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but
- * the order of the permissions is particular and this class guarantees correctness.
- *
- * @export
- * @class BlobSASPermissions
- */
-var BlobSASPermissions = /** @class */ (function () {
- function BlobSASPermissions() {
- /**
- * Specifies Read access granted.
- *
- * @type {boolean}
- * @memberof BlobSASPermissions
- */
- this.read = false;
- /**
- * Specifies Add access granted.
- *
- * @type {boolean}
- * @memberof BlobSASPermissions
- */
- this.add = false;
- /**
- * Specifies Create access granted.
- *
- * @type {boolean}
- * @memberof BlobSASPermissions
- */
- this.create = false;
- /**
- * Specifies Write access granted.
- *
- * @type {boolean}
- * @memberof BlobSASPermissions
- */
- this.write = false;
- /**
- * Specifies Delete access granted.
- *
- * @type {boolean}
- * @memberof BlobSASPermissions
- */
- this.delete = false;
- /**
- * Specifies Delete version access granted.
- *
- * @type {boolean}
- * @memberof BlobSASPermissions
- */
- this.deleteVersion = false;
- /**
- * Specfies Tag access granted.
- *
- * @type {boolean}
- * @memberof BlobSASPermissions
- */
- this.tag = false;
+module.exports.insert.sync = insertSync
+function insertSync (cache, key, integrity, opts) {
+ opts = IndexOpts(opts)
+ const bucket = bucketPath(cache, key)
+ const entry = {
+ key,
+ integrity: integrity && ssri.stringify(integrity),
+ time: Date.now(),
+ size: opts.size,
+ metadata: opts.metadata
+ }
+ fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
+ const stringified = JSON.stringify(entry)
+ fs.appendFileSync(
+ bucket, `\n${hashEntry(stringified)}\t${stringified}`
+ )
+ try {
+ fixOwner.chownr.sync(cache, bucket)
+ } catch (err) {
+ if (err.code !== 'ENOENT') {
+ throw err
}
- /**
- * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an
- * Error if it encounters a character that does not correspond to a valid permission.
- *
- * @static
- * @param {string} permissions
- * @returns {BlobSASPermissions}
- * @memberof BlobSASPermissions
- */
- BlobSASPermissions.parse = function (permissions) {
- var blobSASPermissions = new BlobSASPermissions();
- for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) {
- var char = permissions_1[_i];
- switch (char) {
- case "r":
- blobSASPermissions.read = true;
- break;
- case "a":
- blobSASPermissions.add = true;
- break;
- case "c":
- blobSASPermissions.create = true;
- break;
- case "w":
- blobSASPermissions.write = true;
- break;
- case "d":
- blobSASPermissions.delete = true;
- break;
- case "x":
- blobSASPermissions.deleteVersion = true;
- break;
- case "t":
- blobSASPermissions.tag = true;
- break;
- default:
- throw new RangeError("Invalid permission: " + char);
- }
- }
- return blobSASPermissions;
- };
- /**
- * Converts the given permissions to a string. Using this method will guarantee the permissions are in an
- * order accepted by the service.
- *
- * @returns {string} A string which represents the BlobSASPermissions
- * @memberof BlobSASPermissions
- */
- BlobSASPermissions.prototype.toString = function () {
- var permissions = [];
- if (this.read) {
- permissions.push("r");
- }
- if (this.add) {
- permissions.push("a");
- }
- if (this.create) {
- permissions.push("c");
- }
- if (this.write) {
- permissions.push("w");
- }
- if (this.delete) {
- permissions.push("d");
- }
- if (this.deleteVersion) {
- permissions.push("x");
- }
- if (this.tag) {
- permissions.push("t");
- }
- return permissions.join("");
- };
- return BlobSASPermissions;
-}());
+ }
+ return formatEntry(cache, entry)
+}
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-/**
- * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container.
- * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation.
- * Once all the values are set, this should be serialized with toString and set as the permissions field on a
- * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but
- * the order of the permissions is particular and this class guarantees correctness.
- *
- * @export
- * @class ContainerSASPermissions
- */
-var ContainerSASPermissions = /** @class */ (function () {
- function ContainerSASPermissions() {
- /**
- * Specifies Read access granted.
- *
- * @type {boolean}
- * @memberof ContainerSASPermissions
- */
- this.read = false;
- /**
- * Specifies Add access granted.
- *
- * @type {boolean}
- * @memberof ContainerSASPermissions
- */
- this.add = false;
- /**
- * Specifies Create access granted.
- *
- * @type {boolean}
- * @memberof ContainerSASPermissions
- */
- this.create = false;
- /**
- * Specifies Write access granted.
- *
- * @type {boolean}
- * @memberof ContainerSASPermissions
- */
- this.write = false;
- /**
- * Specifies Delete access granted.
- *
- * @type {boolean}
- * @memberof ContainerSASPermissions
- */
- this.delete = false;
- /**
- * Specifies Delete version access granted.
- *
- * @type {boolean}
- * @memberof ContainerSASPermissions
- */
- this.deleteVersion = false;
- /**
- * Specifies List access granted.
- *
- * @type {boolean}
- * @memberof ContainerSASPermissions
- */
- this.list = false;
- /**
- * Specfies Tag access granted.
- *
- * @type {boolean}
- * @memberof ContainerSASPermissions
- */
- this.tag = false;
+module.exports.find = find
+function find (cache, key) {
+ const bucket = bucketPath(cache, key)
+ return bucketEntries(bucket).then(entries => {
+ return entries.reduce((latest, next) => {
+ if (next && next.key === key) {
+ return formatEntry(cache, next)
+ } else {
+ return latest
+ }
+ }, null)
+ }).catch(err => {
+ if (err.code === 'ENOENT') {
+ return null
+ } else {
+ throw err
}
- /**
- * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an
- * Error if it encounters a character that does not correspond to a valid permission.
- *
- * @static
- * @param {string} permissions
- * @returns {ContainerSASPermissions}
- * @memberof ContainerSASPermissions
- */
- ContainerSASPermissions.parse = function (permissions) {
- var containerSASPermissions = new ContainerSASPermissions();
- for (var _i = 0, permissions_1 = permissions; _i < permissions_1.length; _i++) {
- var char = permissions_1[_i];
- switch (char) {
- case "r":
- containerSASPermissions.read = true;
- break;
- case "a":
- containerSASPermissions.add = true;
- break;
- case "c":
- containerSASPermissions.create = true;
- break;
- case "w":
- containerSASPermissions.write = true;
- break;
- case "d":
- containerSASPermissions.delete = true;
- break;
- case "l":
- containerSASPermissions.list = true;
- break;
- case "t":
- containerSASPermissions.tag = true;
- break;
- case "x":
- containerSASPermissions.deleteVersion = true;
- break;
- default:
- throw new RangeError("Invalid permission " + char);
- }
- }
- return containerSASPermissions;
- };
- /**
- * Converts the given permissions to a string. Using this method will guarantee the permissions are in an
- * order accepted by the service.
- *
- * The order of the characters should be as specified here to ensure correctness.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
- *
- * @returns {string}
- * @memberof ContainerSASPermissions
- */
- ContainerSASPermissions.prototype.toString = function () {
- var permissions = [];
- if (this.read) {
- permissions.push("r");
- }
- if (this.add) {
- permissions.push("a");
- }
- if (this.create) {
- permissions.push("c");
- }
- if (this.write) {
- permissions.push("w");
- }
- if (this.delete) {
- permissions.push("d");
- }
- if (this.deleteVersion) {
- permissions.push("x");
- }
- if (this.list) {
- permissions.push("l");
- }
- if (this.tag) {
- permissions.push("t");
- }
- return permissions.join("");
- };
- return ContainerSASPermissions;
-}());
+ })
+}
-/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- *
- * UserDelegationKeyCredential is only used for generation of user delegation SAS.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas
- *
- * @export
- * @class UserDelegationKeyCredential
- */
-var UserDelegationKeyCredential = /** @class */ (function () {
- /**
- * Creates an instance of UserDelegationKeyCredential.
- * @param {string} accountName
- * @param {UserDelegationKey} userDelegationKey
- * @memberof UserDelegationKeyCredential
- */
- function UserDelegationKeyCredential(accountName, userDelegationKey) {
- this.accountName = accountName;
- this.userDelegationKey = userDelegationKey;
- this.key = Buffer.from(userDelegationKey.value, "base64");
+module.exports.find.sync = findSync
+function findSync (cache, key) {
+ const bucket = bucketPath(cache, key)
+ try {
+ return bucketEntriesSync(bucket).reduce((latest, next) => {
+ if (next && next.key === key) {
+ return formatEntry(cache, next)
+ } else {
+ return latest
+ }
+ }, null)
+ } catch (err) {
+ if (err.code === 'ENOENT') {
+ return null
+ } else {
+ throw err
}
- /**
- * Generates a hash signature for an HTTP request or for a SAS.
- *
- * @param {string} stringToSign
- * @returns {string}
- * @memberof UserDelegationKeyCredential
- */
- UserDelegationKeyCredential.prototype.computeHMACSHA256 = function (stringToSign) {
- // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`);
- return crypto.createHmac("sha256", this.key)
- .update(stringToSign, "utf8")
- .digest("base64");
- };
- return UserDelegationKeyCredential;
-}());
+ }
+}
-// Copyright (c) Microsoft Corporation. All rights reserved.
-function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) {
- var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;
- var sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential
- ? sharedKeyCredentialOrUserDelegationKey
- : undefined;
- var userDelegationKeyCredential;
- if (sharedKeyCredential === undefined && accountName !== undefined) {
- userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey);
- }
- if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) {
- throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName.");
+module.exports.delete = del
+function del (cache, key, opts) {
+ return insert(cache, key, null, opts)
+}
+
+module.exports.delete.sync = delSync
+function delSync (cache, key, opts) {
+ return insertSync(cache, key, null, opts)
+}
+
+module.exports.lsStream = lsStream
+function lsStream (cache) {
+ const indexDir = bucketDir(cache)
+ const stream = from.obj()
+
+ // "/cachename/*"
+ readdirOrEmpty(indexDir).map(bucket => {
+ const bucketPath = path.join(indexDir, bucket)
+
+ // "/cachename//*"
+ return readdirOrEmpty(bucketPath).map(subbucket => {
+ const subbucketPath = path.join(bucketPath, subbucket)
+
+ // "/cachename///*"
+ return readdirOrEmpty(subbucketPath).map(entry => {
+ const getKeyToEntry = bucketEntries(
+ path.join(subbucketPath, entry)
+ ).reduce((acc, entry) => {
+ acc.set(entry.key, entry)
+ return acc
+ }, new Map())
+
+ return getKeyToEntry.then(reduced => {
+ for (let entry of reduced.values()) {
+ const formatted = formatEntry(cache, entry)
+ formatted && stream.push(formatted)
+ }
+ }).catch({ code: 'ENOENT' }, nop)
+ })
+ })
+ }).then(() => {
+ stream.push(null)
+ }, err => {
+ stream.emit('error', err)
+ })
+
+ return stream
+}
+
+module.exports.ls = ls
+function ls (cache) {
+ return BB.fromNode(cb => {
+ lsStream(cache).on('error', cb).pipe(concat(entries => {
+ cb(null, entries.reduce((acc, xs) => {
+ acc[xs.key] = xs
+ return acc
+ }, {}))
+ }))
+ })
+}
+
+function bucketEntries (bucket, filter) {
+ return readFileAsync(
+ bucket, 'utf8'
+ ).then(data => _bucketEntries(data, filter))
+}
+
+function bucketEntriesSync (bucket, filter) {
+ const data = fs.readFileSync(bucket, 'utf8')
+ return _bucketEntries(data, filter)
+}
+
+function _bucketEntries (data, filter) {
+ let entries = []
+ data.split('\n').forEach(entry => {
+ if (!entry) { return }
+ const pieces = entry.split('\t')
+ if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
+ // Hash is no good! Corruption or malice? Doesn't matter!
+ // EJECT EJECT
+ return
}
- // Version 2019-12-12 adds support for the blob tags permission.
- // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields.
- // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string
- if (version >= "2018-11-09") {
- if (sharedKeyCredential !== undefined) {
- return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential);
- }
- else {
- return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential);
- }
+ let obj
+ try {
+ obj = JSON.parse(pieces[1])
+ } catch (e) {
+ // Entry is corrupted!
+ return
}
- if (version >= "2015-04-05") {
- if (sharedKeyCredential !== undefined) {
- return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential);
- }
- else {
- throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key.");
- }
+ if (obj) {
+ entries.push(obj)
}
- throw new RangeError("'version' must be >= '2015-04-05'.");
+ })
+ return entries
}
-/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09.
- *
- * Creates an instance of SASQueryParameters.
- *
- * Only accepts required settings needed to create a SAS. For optional settings please
- * set corresponding properties directly, such as permissions, startsOn and identifier.
- *
- * WARNING: When identifier is not provided, permissions and expiresOn are required.
- * You MUST assign value to identifier or expiresOn & permissions manually if you initial with
- * this constructor.
- *
- * @param {BlobSASSignatureValues} blobSASSignatureValues
- * @param {StorageSharedKeyCredential} sharedKeyCredential
- * @returns {SASQueryParameters}
- */
-function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) {
- if (!blobSASSignatureValues.identifier &&
- !blobSASSignatureValues.permissions &&
- !blobSASSignatureValues.expiresOn) {
- throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.");
+
+module.exports._bucketDir = bucketDir
+function bucketDir (cache) {
+ return path.join(cache, `index-v${indexV}`)
+}
+
+module.exports._bucketPath = bucketPath
+function bucketPath (cache, key) {
+ const hashed = hashKey(key)
+ return path.join.apply(path, [bucketDir(cache)].concat(
+ hashToSegments(hashed)
+ ))
+}
+
+module.exports._hashKey = hashKey
+function hashKey (key) {
+ return hash(key, 'sha256')
+}
+
+module.exports._hashEntry = hashEntry
+function hashEntry (str) {
+ return hash(str, 'sha1')
+}
+
+function hash (str, digest) {
+ return crypto
+ .createHash(digest)
+ .update(str)
+ .digest('hex')
+}
+
+function formatEntry (cache, entry) {
+ // Treat null digests as deletions. They'll shadow any previous entries.
+ if (!entry.integrity) { return null }
+ return {
+ key: entry.key,
+ integrity: entry.integrity,
+ path: contentPath(cache, entry.integrity),
+ size: entry.size,
+ time: entry.time,
+ metadata: entry.metadata
+ }
+}
+
+function readdirOrEmpty (dir) {
+ return readdirAsync(dir)
+ .catch({ code: 'ENOENT' }, () => [])
+ .catch({ code: 'ENOTDIR' }, () => [])
+}
+
+function nop () {
+}
+
+
+/***/ }),
+/* 408 */
+/***/ (function(module) {
+
+"use strict";
+
+
+function isArguments (thingy) {
+ return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee')
+}
+
+var types = {
+ '*': {label: 'any', check: function () { return true }},
+ A: {label: 'array', check: function (thingy) { return Array.isArray(thingy) || isArguments(thingy) }},
+ S: {label: 'string', check: function (thingy) { return typeof thingy === 'string' }},
+ N: {label: 'number', check: function (thingy) { return typeof thingy === 'number' }},
+ F: {label: 'function', check: function (thingy) { return typeof thingy === 'function' }},
+ O: {label: 'object', check: function (thingy) { return typeof thingy === 'object' && thingy != null && !types.A.check(thingy) && !types.E.check(thingy) }},
+ B: {label: 'boolean', check: function (thingy) { return typeof thingy === 'boolean' }},
+ E: {label: 'error', check: function (thingy) { return thingy instanceof Error }},
+ Z: {label: 'null', check: function (thingy) { return thingy == null }}
+}
+
+function addSchema (schema, arity) {
+ var group = arity[schema.length] = arity[schema.length] || []
+ if (group.indexOf(schema) === -1) group.push(schema)
+}
+
+var validate = module.exports = function (rawSchemas, args) {
+ if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length)
+ if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas')
+ if (!args) throw missingRequiredArg(1, 'args')
+ if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas)
+ if (!types.A.check(args)) throw invalidType(1, ['array'], args)
+ var schemas = rawSchemas.split('|')
+ var arity = {}
+
+ schemas.forEach(function (schema) {
+ for (var ii = 0; ii < schema.length; ++ii) {
+ var type = schema[ii]
+ if (!types[type]) throw unknownType(ii, type)
}
- var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;
- var resource = "c";
- var verifiedPermissions;
- if (blobSASSignatureValues.snapshotTime) {
- throw RangeError("'version' must be >= '2018-11-09' when provided 'snapshotTime'.");
+ if (/E.*E/.test(schema)) throw moreThanOneError(schema)
+ addSchema(schema, arity)
+ if (/E/.test(schema)) {
+ addSchema(schema.replace(/E.*$/, 'E'), arity)
+ addSchema(schema.replace(/E/, 'Z'), arity)
+ if (schema.length === 1) addSchema('', arity)
}
- if (blobSASSignatureValues.versionId) {
- throw RangeError("'version' must be >= '2019-10-10' when provided 'versionId'.");
+ })
+ var matching = arity[args.length]
+ if (!matching) {
+ throw wrongNumberOfArgs(Object.keys(arity), args.length)
+ }
+ for (var ii = 0; ii < args.length; ++ii) {
+ var newMatching = matching.filter(function (schema) {
+ var type = schema[ii]
+ var typeCheck = types[type].check
+ return typeCheck(args[ii])
+ })
+ if (!newMatching.length) {
+ var labels = matching.map(function (schema) {
+ return types[schema[ii]].label
+ }).filter(function (schema) { return schema != null })
+ throw invalidType(ii, labels, args[ii])
}
- if (blobSASSignatureValues.permissions &&
- blobSASSignatureValues.permissions.deleteVersion &&
- version < "2019-10-10") {
- throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission.");
+ matching = newMatching
+ }
+}
+
+function missingRequiredArg (num) {
+ return newException('EMISSINGARG', 'Missing required argument #' + (num + 1))
+}
+
+function unknownType (num, type) {
+ return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1))
+}
+
+function invalidType (num, expectedTypes, value) {
+ var valueType
+ Object.keys(types).forEach(function (typeCode) {
+ if (types[typeCode].check(value)) valueType = types[typeCode].label
+ })
+ return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' +
+ englishList(expectedTypes) + ' but got ' + valueType)
+}
+
+function englishList (list) {
+ return list.join(', ').replace(/, ([^,]+)$/, ' or $1')
+}
+
+function wrongNumberOfArgs (expected, got) {
+ var english = englishList(expected)
+ var args = expected.every(function (ex) { return ex.length === 1 })
+ ? 'argument'
+ : 'arguments'
+ return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got)
+}
+
+function moreThanOneError (schema) {
+ return newException('ETOOMANYERRORTYPES',
+ 'Only one error type per argument signature is allowed, more than one found in "' + schema + '"')
+}
+
+function newException (code, msg) {
+ var e = new Error(msg)
+ e.code = code
+ if (Error.captureStackTrace) Error.captureStackTrace(e, validate)
+ return e
+}
+
+
+/***/ }),
+/* 409 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+module.exports = function(Promise, INTERNAL, debug) {
+var util = __webpack_require__(248);
+var TimeoutError = Promise.TimeoutError;
+
+function HandleWrapper(handle) {
+ this.handle = handle;
+}
+
+HandleWrapper.prototype._resultCancelled = function() {
+ clearTimeout(this.handle);
+};
+
+var afterValue = function(value) { return delay(+this).thenReturn(value); };
+var delay = Promise.delay = function (ms, value) {
+ var ret;
+ var handle;
+ if (value !== undefined) {
+ ret = Promise.resolve(value)
+ ._then(afterValue, null, null, ms, undefined);
+ if (debug.cancellation() && value instanceof Promise) {
+ ret._setOnCancel(value);
+ }
+ } else {
+ ret = new Promise(INTERNAL);
+ handle = setTimeout(function() { ret._fulfill(); }, +ms);
+ if (debug.cancellation()) {
+ ret._setOnCancel(new HandleWrapper(handle));
+ }
+ ret._captureStackTrace();
}
- if (blobSASSignatureValues.permissions &&
- blobSASSignatureValues.permissions.tag &&
- version < "2019-12-12") {
- throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission.");
+ ret._setAsyncGuaranteed();
+ return ret;
+};
+
+Promise.prototype.delay = function (ms) {
+ return delay(ms, this);
+};
+
+var afterTimeout = function (promise, message, parent) {
+ var err;
+ if (typeof message !== "string") {
+ if (message instanceof Error) {
+ err = message;
+ } else {
+ err = new TimeoutError("operation timed out");
+ }
+ } else {
+ err = new TimeoutError(message);
}
- if (blobSASSignatureValues.blobName) {
- resource = "b";
+ util.markAsOriginatingFromRejection(err);
+ promise._attachExtraTrace(err);
+ promise._reject(err);
+
+ if (parent != null) {
+ parent.cancel();
}
- // Calling parse and toString guarantees the proper ordering and throws on invalid characters.
- if (blobSASSignatureValues.permissions) {
- if (blobSASSignatureValues.blobName) {
- verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
- }
- else {
- verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+};
+
+function successClear(value) {
+ clearTimeout(this.handle);
+ return value;
+}
+
+function failureClear(reason) {
+ clearTimeout(this.handle);
+ throw reason;
+}
+
+Promise.prototype.timeout = function (ms, message) {
+ ms = +ms;
+ var ret, parent;
+
+ var handleWrapper = new HandleWrapper(setTimeout(function timeoutTimeout() {
+ if (ret.isPending()) {
+ afterTimeout(ret, message, parent);
}
+ }, ms));
+
+ if (debug.cancellation()) {
+ parent = this.then();
+ ret = parent._then(successClear, failureClear,
+ undefined, handleWrapper, undefined);
+ ret._setOnCancel(handleWrapper);
+ } else {
+ ret = this._then(successClear, failureClear,
+ undefined, handleWrapper, undefined);
}
- // Signature is generated on the un-url-encoded values.
- var stringToSign = [
- verifiedPermissions ? verifiedPermissions : "",
- blobSASSignatureValues.startsOn
- ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)
- : "",
- blobSASSignatureValues.expiresOn
- ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)
- : "",
- getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),
- blobSASSignatureValues.identifier,
- blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "",
- blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "",
- version,
- blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "",
- blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "",
- blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "",
- blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "",
- blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : ""
- ].join("\n");
- var signature = sharedKeyCredential.computeHMACSHA256(stringToSign);
- return new SASQueryParameters(version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType);
-}
+
+ return ret;
+};
+
+};
+
+
+/***/ }),
+/* 410 */,
+/* 411 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+
/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.
- *
- * Creates an instance of SASQueryParameters.
- *
- * Only accepts required settings needed to create a SAS. For optional settings please
- * set corresponding properties directly, such as permissions, startsOn and identifier.
- *
- * WARNING: When identifier is not provided, permissions and expiresOn are required.
- * You MUST assign value to identifier or expiresOn & permissions manually if you initial with
- * this constructor.
+ * Module dependencies.
+ */
+var tty = __webpack_require__(867);
+
+var util = __webpack_require__(669);
+/**
+ * This is the Node.js implementation of `debug()`.
+ */
+
+
+exports.init = init;
+exports.log = log;
+exports.formatArgs = formatArgs;
+exports.save = save;
+exports.load = load;
+exports.useColors = useColors;
+/**
+ * Colors.
+ */
+
+exports.colors = [6, 2, 3, 4, 5, 1];
+
+try {
+ // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)
+ // eslint-disable-next-line import/no-extraneous-dependencies
+ var supportsColor = __webpack_require__(247);
+
+ if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {
+ exports.colors = [20, 21, 26, 27, 32, 33, 38, 39, 40, 41, 42, 43, 44, 45, 56, 57, 62, 63, 68, 69, 74, 75, 76, 77, 78, 79, 80, 81, 92, 93, 98, 99, 112, 113, 128, 129, 134, 135, 148, 149, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 178, 179, 184, 185, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 214, 215, 220, 221];
+ }
+} catch (error) {} // Swallow - we only care if `supports-color` is available; it doesn't have to be.
+
+/**
+ * Build up the default `inspectOpts` object from the environment variables.
*
- * @param {BlobSASSignatureValues} blobSASSignatureValues
- * @param {StorageSharedKeyCredential} sharedKeyCredential
- * @returns {SASQueryParameters}
+ * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
*/
-function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) {
- if (!blobSASSignatureValues.identifier &&
- !blobSASSignatureValues.permissions &&
- !blobSASSignatureValues.expiresOn) {
- throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.");
- }
- var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;
- var resource = "c";
- var verifiedPermissions;
- if (blobSASSignatureValues.versionId && version < "2019-10-10") {
- throw RangeError("'version' must be >= '2019-10-10' when provided 'versionId'.");
- }
- if (blobSASSignatureValues.permissions &&
- blobSASSignatureValues.permissions.deleteVersion &&
- version < "2019-10-10") {
- throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission.");
- }
- if (blobSASSignatureValues.permissions &&
- blobSASSignatureValues.permissions.tag &&
- version < "2019-12-12") {
- throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission.");
- }
- if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) {
- throw RangeError("Must provide 'blobName' when provided 'snapshotTime'.");
- }
- if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) {
- throw RangeError("Must provide 'blobName' when provided 'versionId'.");
- }
- var timestamp = blobSASSignatureValues.snapshotTime;
- if (blobSASSignatureValues.blobName) {
- resource = "b";
- if (blobSASSignatureValues.snapshotTime) {
- resource = "bs";
- }
- else if (blobSASSignatureValues.versionId) {
- resource = "bv";
- timestamp = blobSASSignatureValues.versionId;
- }
- }
- // Calling parse and toString guarantees the proper ordering and throws on invalid characters.
- if (blobSASSignatureValues.permissions) {
- if (blobSASSignatureValues.blobName) {
- verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
- }
- else {
- verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
- }
- }
- // Signature is generated on the un-url-encoded values.
- var stringToSign = [
- verifiedPermissions ? verifiedPermissions : "",
- blobSASSignatureValues.startsOn
- ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)
- : "",
- blobSASSignatureValues.expiresOn
- ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)
- : "",
- getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),
- blobSASSignatureValues.identifier,
- blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "",
- blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "",
- version,
- resource,
- timestamp,
- blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "",
- blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "",
- blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "",
- blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "",
- blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : ""
- ].join("\n");
- var signature = sharedKeyCredential.computeHMACSHA256(stringToSign);
- return new SASQueryParameters(version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType);
+
+
+exports.inspectOpts = Object.keys(process.env).filter(function (key) {
+ return /^debug_/i.test(key);
+}).reduce(function (obj, key) {
+ // Camel-case
+ var prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, function (_, k) {
+ return k.toUpperCase();
+ }); // Coerce string value into JS value
+
+ var val = process.env[key];
+
+ if (/^(yes|on|true|enabled)$/i.test(val)) {
+ val = true;
+ } else if (/^(no|off|false|disabled)$/i.test(val)) {
+ val = false;
+ } else if (val === 'null') {
+ val = null;
+ } else {
+ val = Number(val);
+ }
+
+ obj[prop] = val;
+ return obj;
+}, {});
+/**
+ * Is stdout a TTY? Colored output is enabled when `true`.
+ */
+
+function useColors() {
+ return 'colors' in exports.inspectOpts ? Boolean(exports.inspectOpts.colors) : tty.isatty(process.stderr.fd);
}
/**
- * ONLY AVAILABLE IN NODE.JS RUNTIME.
- * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.
+ * Adds ANSI color escape codes if enabled.
*
- * Creates an instance of SASQueryParameters.
+ * @api public
+ */
+
+
+function formatArgs(args) {
+ var name = this.namespace,
+ useColors = this.useColors;
+
+ if (useColors) {
+ var c = this.color;
+ var colorCode = "\x1B[3" + (c < 8 ? c : '8;5;' + c);
+ var prefix = " ".concat(colorCode, ";1m").concat(name, " \x1B[0m");
+ args[0] = prefix + args[0].split('\n').join('\n' + prefix);
+ args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + "\x1B[0m");
+ } else {
+ args[0] = getDate() + name + ' ' + args[0];
+ }
+}
+
+function getDate() {
+ if (exports.inspectOpts.hideDate) {
+ return '';
+ }
+
+ return new Date().toISOString() + ' ';
+}
+/**
+ * Invokes `util.format()` with the specified arguments and writes to stderr.
+ */
+
+
+function log() {
+ return process.stderr.write(util.format.apply(util, arguments) + '\n');
+}
+/**
+ * Save `namespaces`.
*
- * Only accepts required settings needed to create a SAS. For optional settings please
- * set corresponding properties directly, such as permissions, startsOn and identifier.
+ * @param {String} namespaces
+ * @api private
+ */
+
+
+function save(namespaces) {
+ if (namespaces) {
+ process.env.DEBUG = namespaces;
+ } else {
+ // If you set a process.env field to null or undefined, it gets cast to the
+ // string 'null' or 'undefined'. Just delete instead.
+ delete process.env.DEBUG;
+ }
+}
+/**
+ * Load `namespaces`.
*
- * WARNING: identifier will be ignored, permissions and expiresOn are required.
+ * @return {String} returns the previously persisted debug modes
+ * @api private
+ */
+
+
+function load() {
+ return process.env.DEBUG;
+}
+/**
+ * Init logic for `debug` instances.
*
- * @param {BlobSASSignatureValues} blobSASSignatureValues
- * @param {UserDelegationKeyCredential} userDelegationKeyCredential
- * @returns {SASQueryParameters}
+ * Create a new `inspectOpts` object in case `useColors` is set
+ * differently for a particular `debug` instance.
*/
-function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) {
- if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {
- throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.");
- }
- var version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;
- if (blobSASSignatureValues.versionId && version < "2019-10-10") {
- throw RangeError("'version' must be >= '2019-10-10' when provided 'versionId'.");
- }
- if (blobSASSignatureValues.permissions &&
- blobSASSignatureValues.permissions.deleteVersion &&
- version < "2019-10-10") {
- throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission.");
- }
- if (blobSASSignatureValues.permissions &&
- blobSASSignatureValues.permissions.tag &&
- version < "2019-12-12") {
- throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission.");
+
+
+function init(debug) {
+ debug.inspectOpts = {};
+ var keys = Object.keys(exports.inspectOpts);
+
+ for (var i = 0; i < keys.length; i++) {
+ debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
+ }
+}
+
+module.exports = __webpack_require__(783)(exports);
+var formatters = module.exports.formatters;
+/**
+ * Map %o to `util.inspect()`, all on a single line.
+ */
+
+formatters.o = function (v) {
+ this.inspectOpts.colors = this.useColors;
+ return util.inspect(v, this.inspectOpts).replace(/\s*\n\s*/g, ' ');
+};
+/**
+ * Map %O to `util.inspect()`, allowing multiple lines if needed.
+ */
+
+
+formatters.O = function (v) {
+ this.inspectOpts.colors = this.useColors;
+ return util.inspect(v, this.inspectOpts);
+};
+
+
+
+/***/ }),
+/* 412 */
+/***/ (function(module, exports, __webpack_require__) {
+
+"use strict";
+
+var Progress = __webpack_require__(264)
+var Gauge = __webpack_require__(429)
+var EE = __webpack_require__(614).EventEmitter
+var log = exports = module.exports = new EE()
+var util = __webpack_require__(669)
+
+var setBlocking = __webpack_require__(299)
+var consoleControl = __webpack_require__(920)
+
+setBlocking(true)
+var stream = process.stderr
+Object.defineProperty(log, 'stream', {
+ set: function (newStream) {
+ stream = newStream
+ if (this.gauge) this.gauge.setWriteTo(stream, stream)
+ },
+ get: function () {
+ return stream
+ }
+})
+
+// by default, decide based on tty-ness.
+var colorEnabled
+log.useColor = function () {
+ return colorEnabled != null ? colorEnabled : stream.isTTY
+}
+
+log.enableColor = function () {
+ colorEnabled = true
+ this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled})
+}
+log.disableColor = function () {
+ colorEnabled = false
+ this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled})
+}
+
+// default level
+log.level = 'info'
+
+log.gauge = new Gauge(stream, {
+ enabled: false, // no progress bars unless asked
+ theme: {hasColor: log.useColor()},
+ template: [
+ {type: 'progressbar', length: 20},
+ {type: 'activityIndicator', kerning: 1, length: 1},
+ {type: 'section', default: ''},
+ ':',
+ {type: 'logline', kerning: 1, default: ''}
+ ]
+})
+
+log.tracker = new Progress.TrackerGroup()
+
+// we track this separately as we may need to temporarily disable the
+// display of the status bar for our own loggy purposes.
+log.progressEnabled = log.gauge.isEnabled()
+
+var unicodeEnabled
+
+log.enableUnicode = function () {
+ unicodeEnabled = true
+ this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled})
+}
+
+log.disableUnicode = function () {
+ unicodeEnabled = false
+ this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled})
+}
+
+log.setGaugeThemeset = function (themes) {
+ this.gauge.setThemeset(themes)
+}
+
+log.setGaugeTemplate = function (template) {
+ this.gauge.setTemplate(template)
+}
+
+log.enableProgress = function () {
+ if (this.progressEnabled) return
+ this.progressEnabled = true
+ this.tracker.on('change', this.showProgress)
+ if (this._pause) return
+ this.gauge.enable()
+}
+
+log.disableProgress = function () {
+ if (!this.progressEnabled) return
+ this.progressEnabled = false
+ this.tracker.removeListener('change', this.showProgress)
+ this.gauge.disable()
+}
+
+var trackerConstructors = ['newGroup', 'newItem', 'newStream']
+
+var mixinLog = function (tracker) {
+ // mixin the public methods from log into the tracker
+  // (except: conflicts and ones we handle specially)
+ Object.keys(log).forEach(function (P) {
+ if (P[0] === '_') return
+ if (trackerConstructors.filter(function (C) { return C === P }).length) return
+ if (tracker[P]) return
+ if (typeof log[P] !== 'function') return
+ var func = log[P]
+ tracker[P] = function () {
+ return func.apply(log, arguments)
}
- var resource = "c";
- var verifiedPermissions;
- if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) {
- throw RangeError("Must provide 'blobName' when provided 'snapshotTime'.");
+ })
+ // if the new tracker is a group, make sure any subtrackers get
+ // mixed in too
+ if (tracker instanceof Progress.TrackerGroup) {
+ trackerConstructors.forEach(function (C) {
+ var func = tracker[C]
+ tracker[C] = function () { return mixinLog(func.apply(tracker, arguments)) }
+ })
+ }
+ return tracker
+}
+
+// Add tracker constructors to the top level log object
+trackerConstructors.forEach(function (C) {
+ log[C] = function () { return mixinLog(this.tracker[C].apply(this.tracker, arguments)) }
+})
+
+log.clearProgress = function (cb) {
+ if (!this.progressEnabled) return cb && process.nextTick(cb)
+ this.gauge.hide(cb)
+}
+
+log.showProgress = function (name, completed) {
+ if (!this.progressEnabled) return
+ var values = {}
+ if (name) values.section = name
+ var last = log.record[log.record.length - 1]
+ if (last) {
+ values.subsection = last.prefix
+ var disp = log.disp[last.level] || last.level
+ var logline = this._format(disp, log.style[last.level])
+ if (last.prefix) logline += ' ' + this._format(last.prefix, this.prefixStyle)
+ logline += ' ' + last.message.split(/\r?\n/)[0]
+ values.logline = logline
+ }
+ values.completed = completed || this.tracker.completed()
+ this.gauge.show(values)
+}.bind(log) // bind for use in tracker's on-change listener
+
+// temporarily stop emitting, but don't drop
+log.pause = function () {
+ this._paused = true
+ if (this.progressEnabled) this.gauge.disable()
+}
+
+log.resume = function () {
+ if (!this._paused) return
+ this._paused = false
+
+ var b = this._buffer
+ this._buffer = []
+ b.forEach(function (m) {
+ this.emitLog(m)
+ }, this)
+ if (this.progressEnabled) this.gauge.enable()
+}
+
+log._buffer = []
+
+var id = 0
+log.record = []
+log.maxRecordSize = 10000
+log.log = function (lvl, prefix, message) {
+ var l = this.levels[lvl]
+ if (l === undefined) {
+ return this.emit('error', new Error(util.format(
+ 'Undefined log level: %j', lvl)))
+ }
+
+ var a = new Array(arguments.length - 2)
+ var stack = null
+ for (var i = 2; i < arguments.length; i++) {
+ var arg = a[i - 2] = arguments[i]
+
+ // resolve stack traces to a plain string.
+ if (typeof arg === 'object' && arg &&
+ (arg instanceof Error) && arg.stack) {
+
+ Object.defineProperty(arg, 'stack', {
+ value: stack = arg.stack + '',
+ enumerable: true,
+ writable: true
+ })
}
- if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) {
- throw RangeError("Must provide 'blobName' when provided 'versionId'.");
+ }
+ if (stack) a.unshift(stack + '\n')
+ message = util.format.apply(util, a)
+
+ var m = { id: id++,
+ level: lvl,
+ prefix: String(prefix || ''),
+ message: message,
+ messageRaw: a }
+
+ this.emit('log', m)
+ this.emit('log.' + lvl, m)
+ if (m.prefix) this.emit(m.prefix, m)
+
+ this.record.push(m)
+ var mrs = this.maxRecordSize
+ var n = this.record.length - mrs
+ if (n > mrs / 10) {
+ var newSize = Math.floor(mrs * 0.9)
+ this.record = this.record.slice(-1 * newSize)
+ }
+
+ this.emitLog(m)
+}.bind(log)
+
+log.emitLog = function (m) {
+ if (this._paused) {
+ this._buffer.push(m)
+ return
+ }
+ if (this.progressEnabled) this.gauge.pulse(m.prefix)
+ var l = this.levels[m.level]
+ if (l === undefined) return
+ if (l < this.levels[this.level]) return
+ if (l > 0 && !isFinite(l)) return
+
+ // If 'disp' is null or undefined, use the lvl as a default
+ // Allows: '', 0 as valid disp
+ var disp = log.disp[m.level] != null ? log.disp[m.level] : m.level
+ this.clearProgress()
+ m.message.split(/\r?\n/).forEach(function (line) {
+ if (this.heading) {
+ this.write(this.heading, this.headingStyle)
+ this.write(' ')
}
- var timestamp = blobSASSignatureValues.snapshotTime;
- if (blobSASSignatureValues.blobName) {
- resource = "b";
- if (blobSASSignatureValues.snapshotTime) {
- resource = "bs";
- }
- else if (blobSASSignatureValues.versionId) {
- resource = "bv";
- timestamp = blobSASSignatureValues.versionId;
+ this.write(disp, log.style[m.level])
+ var p = m.prefix || ''
+ if (p) this.write(' ')
+ this.write(p, this.prefixStyle)
+ this.write(' ' + line + '\n')
+ }, this)
+ this.showProgress()
+}
+
+log._format = function (msg, style) {
+ if (!stream) return
+
+ var output = ''
+ if (this.useColor()) {
+ style = style || {}
+ var settings = []
+ if (style.fg) settings.push(style.fg)
+ if (style.bg) settings.push('bg' + style.bg[0].toUpperCase() + style.bg.slice(1))
+ if (style.bold) settings.push('bold')
+ if (style.underline) settings.push('underline')
+ if (style.inverse) settings.push('inverse')
+ if (settings.length) output += consoleControl.color(settings)
+ if (style.beep) output += consoleControl.beep()
+ }
+ output += msg
+ if (this.useColor()) {
+ output += consoleControl.color('reset')
+ }
+ return output
+}
+
+log.write = function (msg, style) {
+ if (!stream) return
+
+ stream.write(this._format(msg, style))
+}
+
+log.addLevel = function (lvl, n, style, disp) {
+ // If 'disp' is null or undefined, use the lvl as a default
+ if (disp == null) disp = lvl
+ this.levels[lvl] = n
+ this.style[lvl] = style
+ if (!this[lvl]) {
+ this[lvl] = function () {
+ var a = new Array(arguments.length + 1)
+ a[0] = lvl
+ for (var i = 0; i < arguments.length; i++) {
+ a[i + 1] = arguments[i]
+ }
+ return this.log.apply(this, a)
+ }.bind(this)
+ }
+ this.disp[lvl] = disp
+}
+
+log.prefixStyle = { fg: 'magenta' }
+log.headingStyle = { fg: 'white', bg: 'black' }
+
+log.style = {}
+log.levels = {}
+log.disp = {}
+log.addLevel('silly', -Infinity, { inverse: true }, 'sill')
+log.addLevel('verbose', 1000, { fg: 'blue', bg: 'black' }, 'verb')
+log.addLevel('info', 2000, { fg: 'green' })
+log.addLevel('timing', 2500, { fg: 'green', bg: 'black' })
+log.addLevel('http', 3000, { fg: 'green', bg: 'black' })
+log.addLevel('notice', 3500, { fg: 'blue', bg: 'black' })
+log.addLevel('warn', 4000, { fg: 'black', bg: 'yellow' }, 'WARN')
+log.addLevel('error', 5000, { fg: 'red', bg: 'black' }, 'ERR!')
+log.addLevel('silent', Infinity)
+
+// allow 'error' prefix
+log.on('error', function () {})
+
+
+/***/ }),
+/* 413 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+module.exports = __webpack_require__(141);
+
+
+/***/ }),
+/* 414 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+var cr = Object.create;
+if (cr) {
+ var callerCache = cr(null);
+ var getterCache = cr(null);
+ callerCache[" size"] = getterCache[" size"] = 0;
+}
+
+module.exports = function(Promise) {
+var util = __webpack_require__(248);
+var canEvaluate = util.canEvaluate;
+var isIdentifier = util.isIdentifier;
+
+var getMethodCaller;
+var getGetter;
+if (true) {
+var makeMethodCaller = function (methodName) {
+ return new Function("ensureMethod", " \n\
+ return function(obj) { \n\
+ 'use strict' \n\
+ var len = this.length; \n\
+ ensureMethod(obj, 'methodName'); \n\
+ switch(len) { \n\
+ case 1: return obj.methodName(this[0]); \n\
+ case 2: return obj.methodName(this[0], this[1]); \n\
+ case 3: return obj.methodName(this[0], this[1], this[2]); \n\
+ case 0: return obj.methodName(); \n\
+ default: \n\
+ return obj.methodName.apply(obj, this); \n\
+ } \n\
+ }; \n\
+ ".replace(/methodName/g, methodName))(ensureMethod);
+};
+
+var makeGetter = function (propertyName) {
+ return new Function("obj", " \n\
+ 'use strict'; \n\
+ return obj.propertyName; \n\
+ ".replace("propertyName", propertyName));
+};
+
+var getCompiled = function(name, compiler, cache) {
+ var ret = cache[name];
+ if (typeof ret !== "function") {
+ if (!isIdentifier(name)) {
+ return null;
}
- }
- // Calling parse and toString guarantees the proper ordering and throws on invalid characters.
- if (blobSASSignatureValues.permissions) {
- if (blobSASSignatureValues.blobName) {
- verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+ ret = compiler(name);
+ cache[name] = ret;
+ cache[" size"]++;
+ if (cache[" size"] > 512) {
+ var keys = Object.keys(cache);
+ for (var i = 0; i < 256; ++i) delete cache[keys[i]];
+ cache[" size"] = keys.length - 256;
}
- else {
- verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString();
+ }
+ return ret;
+};
+
+getMethodCaller = function(name) {
+ return getCompiled(name, makeMethodCaller, callerCache);
+};
+
+getGetter = function(name) {
+ return getCompiled(name, makeGetter, getterCache);
+};
+}
+
+function ensureMethod(obj, methodName) {
+ var fn;
+ if (obj != null) fn = obj[methodName];
+ if (typeof fn !== "function") {
+ var message = "Object " + util.classString(obj) + " has no method '" +
+ util.toString(methodName) + "'";
+ throw new Promise.TypeError(message);
+ }
+ return fn;
+}
+
+function caller(obj) {
+ var methodName = this.pop();
+ var fn = ensureMethod(obj, methodName);
+ return fn.apply(obj, this);
+}
+Promise.prototype.call = function (methodName) {
+ var $_len = arguments.length;var args = new Array(Math.max($_len - 1, 0)); for(var $_i = 1; $_i < $_len; ++$_i) {args[$_i - 1] = arguments[$_i];};
+ if (true) {
+ if (canEvaluate) {
+ var maybeCaller = getMethodCaller(methodName);
+ if (maybeCaller !== null) {
+ return this._then(
+ maybeCaller, undefined, undefined, args, undefined);
+ }
}
}
- // Signature is generated on the un-url-encoded values.
- var stringToSign = [
- verifiedPermissions ? verifiedPermissions : "",
- blobSASSignatureValues.startsOn
- ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)
- : "",
- blobSASSignatureValues.expiresOn
- ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)
- : "",
- getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName),
- userDelegationKeyCredential.userDelegationKey.signedObjectId,
- userDelegationKeyCredential.userDelegationKey.signedTenantId,
- userDelegationKeyCredential.userDelegationKey.signedStartsOn
- ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)
- : "",
- userDelegationKeyCredential.userDelegationKey.signedExpiresOn
- ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)
- : "",
- userDelegationKeyCredential.userDelegationKey.signedService,
- userDelegationKeyCredential.userDelegationKey.signedVersion,
- blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "",
- blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "",
- version,
- resource,
- timestamp,
- blobSASSignatureValues.cacheControl,
- blobSASSignatureValues.contentDisposition,
- blobSASSignatureValues.contentEncoding,
- blobSASSignatureValues.contentLanguage,
- blobSASSignatureValues.contentType
- ].join("\n");
- var signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);
- return new SASQueryParameters(version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey);
+ args.push(methodName);
+ return this._then(caller, undefined, undefined, args, undefined);
+};
+
+function namedGetter(obj) {
+ return obj[this];
}
-function getCanonicalName(accountName, containerName, blobName) {
- // Container: "/blob/account/containerName"
- // Blob: "/blob/account/containerName/blobName"
- var elements = ["/blob/" + accountName + "/" + containerName];
- if (blobName) {
- elements.push("/" + blobName);
+function indexedGetter(obj) {
+ var index = +this;
+ if (index < 0) index = Math.max(0, index + obj.length);
+ return obj[index];
+}
+Promise.prototype.get = function (propertyName) {
+ var isIndex = (typeof propertyName === "number");
+ var getter;
+ if (!isIndex) {
+ if (canEvaluate) {
+ var maybeGetter = getGetter(propertyName);
+ getter = maybeGetter !== null ? maybeGetter : namedGetter;
+ } else {
+ getter = namedGetter;
+ }
+ } else {
+ getter = indexedGetter;
}
- return elements.join("");
+ return this._then(getter, undefined, undefined, propertyName, undefined);
+};
+};
+
+
+/***/ }),
+/* 415 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const Buffer = __webpack_require__(921)
+
+// A readable tar stream creator
+// Technically, this is a transform stream that you write paths into,
+// and tar format comes out of.
+// The `add()` method is like `write()` but returns this,
+// and end() return `this` as well, so you can
+// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
+// You could also do something like:
+// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
+
+class PackJob {
+ constructor (path, absolute) {
+ this.path = path || './'
+ this.absolute = absolute
+ this.entry = null
+ this.stat = null
+ this.readdir = null
+ this.pending = false
+ this.ignore = false
+ this.piped = false
+ }
}
-Object.defineProperty(exports, 'BaseRequestPolicy', {
- enumerable: true,
- get: function () {
- return coreHttp.BaseRequestPolicy;
+const MiniPass = __webpack_require__(720)
+const zlib = __webpack_require__(268)
+const ReadEntry = __webpack_require__(589)
+const WriteEntry = __webpack_require__(485)
+const WriteEntrySync = WriteEntry.Sync
+const WriteEntryTar = WriteEntry.Tar
+const Yallist = __webpack_require__(612)
+const EOF = Buffer.alloc(1024)
+const ONSTAT = Symbol('onStat')
+const ENDED = Symbol('ended')
+const QUEUE = Symbol('queue')
+const CURRENT = Symbol('current')
+const PROCESS = Symbol('process')
+const PROCESSING = Symbol('processing')
+const PROCESSJOB = Symbol('processJob')
+const JOBS = Symbol('jobs')
+const JOBDONE = Symbol('jobDone')
+const ADDFSENTRY = Symbol('addFSEntry')
+const ADDTARENTRY = Symbol('addTarEntry')
+const STAT = Symbol('stat')
+const READDIR = Symbol('readdir')
+const ONREADDIR = Symbol('onreaddir')
+const PIPE = Symbol('pipe')
+const ENTRY = Symbol('entry')
+const ENTRYOPT = Symbol('entryOpt')
+const WRITEENTRYCLASS = Symbol('writeEntryClass')
+const WRITE = Symbol('write')
+const ONDRAIN = Symbol('ondrain')
+
+const fs = __webpack_require__(747)
+const path = __webpack_require__(622)
+const warner = __webpack_require__(937)
+
+const Pack = warner(class Pack extends MiniPass {
+ constructor (opt) {
+ super(opt)
+ opt = opt || Object.create(null)
+ this.opt = opt
+ this.cwd = opt.cwd || process.cwd()
+ this.maxReadSize = opt.maxReadSize
+ this.preservePaths = !!opt.preservePaths
+ this.strict = !!opt.strict
+ this.noPax = !!opt.noPax
+ this.prefix = (opt.prefix || '').replace(/(\\|\/)+$/, '')
+ this.linkCache = opt.linkCache || new Map()
+ this.statCache = opt.statCache || new Map()
+ this.readdirCache = opt.readdirCache || new Map()
+
+ this[WRITEENTRYCLASS] = WriteEntry
+ if (typeof opt.onwarn === 'function')
+ this.on('warn', opt.onwarn)
+
+ this.zip = null
+ if (opt.gzip) {
+ if (typeof opt.gzip !== 'object')
+ opt.gzip = {}
+ this.zip = new zlib.Gzip(opt.gzip)
+ this.zip.on('data', chunk => super.write(chunk))
+ this.zip.on('end', _ => super.end())
+ this.zip.on('drain', _ => this[ONDRAIN]())
+ this.on('resume', _ => this.zip.resume())
+ } else
+ this.on('drain', this[ONDRAIN])
+
+ this.portable = !!opt.portable
+ this.noDirRecurse = !!opt.noDirRecurse
+ this.follow = !!opt.follow
+ this.noMtime = !!opt.noMtime
+ this.mtime = opt.mtime || null
+
+ this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
+
+ this[QUEUE] = new Yallist
+ this[JOBS] = 0
+ this.jobs = +opt.jobs || 4
+ this[PROCESSING] = false
+ this[ENDED] = false
+ }
+
+ [WRITE] (chunk) {
+ return super.write(chunk)
+ }
+
+ add (path) {
+ this.write(path)
+ return this
+ }
+
+ end (path) {
+ if (path)
+ this.write(path)
+ this[ENDED] = true
+ this[PROCESS]()
+ return this
+ }
+
+ write (path) {
+ if (this[ENDED])
+ throw new Error('write after end')
+
+ if (path instanceof ReadEntry)
+ this[ADDTARENTRY](path)
+ else
+ this[ADDFSENTRY](path)
+ return this.flowing
+ }
+
+ [ADDTARENTRY] (p) {
+ const absolute = path.resolve(this.cwd, p.path)
+ if (this.prefix)
+ p.path = this.prefix + '/' + p.path.replace(/^\.(\/+|$)/, '')
+
+ // in this case, we don't have to wait for the stat
+ if (!this.filter(p.path, p))
+ p.resume()
+ else {
+ const job = new PackJob(p.path, absolute, false)
+ job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
+ job.entry.on('end', _ => this[JOBDONE](job))
+ this[JOBS] += 1
+ this[QUEUE].push(job)
}
-});
-Object.defineProperty(exports, 'HttpHeaders', {
- enumerable: true,
- get: function () {
- return coreHttp.HttpHeaders;
+
+ this[PROCESS]()
+ }
+
+ [ADDFSENTRY] (p) {
+ const absolute = path.resolve(this.cwd, p)
+ if (this.prefix)
+ p = this.prefix + '/' + p.replace(/^\.(\/+|$)/, '')
+
+ this[QUEUE].push(new PackJob(p, absolute))
+ this[PROCESS]()
+ }
+
+ [STAT] (job) {
+ job.pending = true
+ this[JOBS] += 1
+ const stat = this.follow ? 'stat' : 'lstat'
+ fs[stat](job.absolute, (er, stat) => {
+ job.pending = false
+ this[JOBS] -= 1
+ if (er)
+ this.emit('error', er)
+ else
+ this[ONSTAT](job, stat)
+ })
+ }
+
+ [ONSTAT] (job, stat) {
+ this.statCache.set(job.absolute, stat)
+ job.stat = stat
+
+ // now we have the stat, we can filter it.
+ if (!this.filter(job.path, stat))
+ job.ignore = true
+
+ this[PROCESS]()
+ }
+
+ [READDIR] (job) {
+ job.pending = true
+ this[JOBS] += 1
+ fs.readdir(job.absolute, (er, entries) => {
+ job.pending = false
+ this[JOBS] -= 1
+ if (er)
+ return this.emit('error', er)
+ this[ONREADDIR](job, entries)
+ })
+ }
+
+ [ONREADDIR] (job, entries) {
+ this.readdirCache.set(job.absolute, entries)
+ job.readdir = entries
+ this[PROCESS]()
+ }
+
+ [PROCESS] () {
+ if (this[PROCESSING])
+ return
+
+ this[PROCESSING] = true
+ for (let w = this[QUEUE].head;
+ w !== null && this[JOBS] < this.jobs;
+ w = w.next) {
+ this[PROCESSJOB](w.value)
+ if (w.value.ignore) {
+ const p = w.next
+ this[QUEUE].removeNode(w)
+ w.next = p
+ }
}
-});
-Object.defineProperty(exports, 'RequestPolicyOptions', {
- enumerable: true,
- get: function () {
- return coreHttp.RequestPolicyOptions;
+
+ this[PROCESSING] = false
+
+ if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
+ if (this.zip)
+ this.zip.end(EOF)
+ else {
+ super.write(EOF)
+ super.end()
+ }
}
-});
-Object.defineProperty(exports, 'RestError', {
- enumerable: true,
- get: function () {
- return coreHttp.RestError;
+ }
+
+ get [CURRENT] () {
+ return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
+ }
+
+ [JOBDONE] (job) {
+ this[QUEUE].shift()
+ this[JOBS] -= 1
+ this[PROCESS]()
+ }
+
+ [PROCESSJOB] (job) {
+ if (job.pending)
+ return
+
+ if (job.entry) {
+ if (job === this[CURRENT] && !job.piped)
+ this[PIPE](job)
+ return
}
-});
-Object.defineProperty(exports, 'WebResource', {
- enumerable: true,
- get: function () {
- return coreHttp.WebResource;
+
+ if (!job.stat) {
+ if (this.statCache.has(job.absolute))
+ this[ONSTAT](job, this.statCache.get(job.absolute))
+ else
+ this[STAT](job)
}
-});
-Object.defineProperty(exports, 'deserializationPolicy', {
- enumerable: true,
- get: function () {
- return coreHttp.deserializationPolicy;
+ if (!job.stat)
+ return
+
+ // filtered out!
+ if (job.ignore)
+ return
+
+ if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
+ if (this.readdirCache.has(job.absolute))
+ this[ONREADDIR](job, this.readdirCache.get(job.absolute))
+ else
+ this[READDIR](job)
+ if (!job.readdir)
+ return
}
-});
-exports.AccountSASPermissions = AccountSASPermissions;
-exports.AccountSASResourceTypes = AccountSASResourceTypes;
-exports.AccountSASServices = AccountSASServices;
-exports.AnonymousCredential = AnonymousCredential;
-exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy;
-exports.AppendBlobClient = AppendBlobClient;
-exports.BlobBatch = BlobBatch;
-exports.BlobBatchClient = BlobBatchClient;
-exports.BlobClient = BlobClient;
-exports.BlobLeaseClient = BlobLeaseClient;
-exports.BlobSASPermissions = BlobSASPermissions;
-exports.BlobServiceClient = BlobServiceClient;
-exports.BlockBlobClient = BlockBlobClient;
-exports.ContainerClient = ContainerClient;
-exports.ContainerSASPermissions = ContainerSASPermissions;
-exports.Credential = Credential;
-exports.CredentialPolicy = CredentialPolicy;
-exports.PageBlobClient = PageBlobClient;
-exports.Pipeline = Pipeline;
-exports.SASQueryParameters = SASQueryParameters;
-exports.StorageBrowserPolicy = StorageBrowserPolicy;
-exports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory;
-exports.StorageOAuthScopes = StorageOAuthScopes;
-exports.StorageRetryPolicy = StorageRetryPolicy;
-exports.StorageRetryPolicyFactory = StorageRetryPolicyFactory;
-exports.StorageSharedKeyCredential = StorageSharedKeyCredential;
-exports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy;
-exports.generateAccountSASQueryParameters = generateAccountSASQueryParameters;
-exports.generateBlobSASQueryParameters = generateBlobSASQueryParameters;
-exports.logger = logger;
-exports.newPipeline = newPipeline;
-//# sourceMappingURL=index.js.map
+ // we know it doesn't have an entry, because that got checked above
+ job.entry = this[ENTRY](job)
+ if (!job.entry) {
+ job.ignore = true
+ return
+ }
+
+ if (job === this[CURRENT] && !job.piped)
+ this[PIPE](job)
+ }
+
+ [ENTRYOPT] (job) {
+ return {
+ onwarn: (msg, data) => {
+ this.warn(msg, data)
+ },
+ noPax: this.noPax,
+ cwd: this.cwd,
+ absolute: job.absolute,
+ preservePaths: this.preservePaths,
+ maxReadSize: this.maxReadSize,
+ strict: this.strict,
+ portable: this.portable,
+ linkCache: this.linkCache,
+ statCache: this.statCache,
+ noMtime: this.noMtime,
+ mtime: this.mtime
+ }
+ }
+
+ [ENTRY] (job) {
+ this[JOBS] += 1
+ try {
+ return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
+ .on('end', () => this[JOBDONE](job))
+ .on('error', er => this.emit('error', er))
+ } catch (er) {
+ this.emit('error', er)
+ }
+ }
+
+ [ONDRAIN] () {
+ if (this[CURRENT] && this[CURRENT].entry)
+ this[CURRENT].entry.resume()
+ }
+
+ // like .pipe() but using super, because our write() is special
+ [PIPE] (job) {
+ job.piped = true
+
+ if (job.readdir)
+ job.readdir.forEach(entry => {
+ const p = this.prefix ?
+ job.path.slice(this.prefix.length + 1) || './'
+ : job.path
+
+ const base = p === './' ? '' : p.replace(/\/*$/, '/')
+ this[ADDFSENTRY](base + entry)
+ })
+
+ const source = job.entry
+ const zip = this.zip
-/***/ }),
-/* 374 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ if (zip)
+ source.on('data', chunk => {
+ if (!zip.write(chunk))
+ source.pause()
+ })
+ else
+ source.on('data', chunk => {
+ if (!super.write(chunk))
+ source.pause()
+ })
+ }
-"use strict";
+ pause () {
+ if (this.zip)
+ this.zip.pause()
+ return super.pause()
+ }
+})
+class PackSync extends Pack {
+ constructor (opt) {
+ super(opt)
+ this[WRITEENTRYCLASS] = WriteEntrySync
+ }
-module.exports = __webpack_require__(990)
+ // pause/resume are no-ops in sync streams.
+ pause () {}
+ resume () {}
+
+ [STAT] (job) {
+ const stat = this.follow ? 'statSync' : 'lstatSync'
+ this[ONSTAT](job, fs[stat](job.absolute))
+ }
+
+ [READDIR] (job, stat) {
+ this[ONREADDIR](job, fs.readdirSync(job.absolute))
+ }
+
+ // gotta get it all in this tick
+ [PIPE] (job) {
+ const source = job.entry
+ const zip = this.zip
+
+ if (job.readdir)
+ job.readdir.forEach(entry => {
+ const p = this.prefix ?
+ job.path.slice(this.prefix.length + 1) || './'
+ : job.path
+
+ const base = p === './' ? '' : p.replace(/\/*$/, '/')
+ this[ADDFSENTRY](base + entry)
+ })
+
+ if (zip)
+ source.on('data', chunk => {
+ zip.write(chunk)
+ })
+ else
+ source.on('data', chunk => {
+ super[WRITE](chunk)
+ })
+ }
+}
+
+Pack.Sync = PackSync
+
+module.exports = Pack
/***/ }),
-/* 375 */,
-/* 376 */,
-/* 377 */
+/* 416 */,
+/* 417 */
+/***/ (function(module) {
+
+module.exports = require("crypto");
+
+/***/ }),
+/* 418 */
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-module.exports = function(Promise, INTERNAL) {
-var util = __webpack_require__(248);
-var errorObj = util.errorObj;
-var isObject = util.isObject;
-function tryConvertToPromise(obj, context) {
- if (isObject(obj)) {
- if (obj instanceof Promise) return obj;
- var then = getThen(obj);
- if (then === errorObj) {
- if (context) context._pushContext();
- var ret = Promise.reject(then.e);
- if (context) context._popContext();
- return ret;
- } else if (typeof then === "function") {
- if (isAnyBluebirdPromise(obj)) {
- var ret = new Promise(INTERNAL);
- obj._then(
- ret._fulfill,
- ret._reject,
- undefined,
- ret,
- null
- );
- return ret;
- }
- return doThenable(obj, then, context);
- }
+const fs = __webpack_require__(747)
+const path = __webpack_require__(622)
+const EE = __webpack_require__(614).EventEmitter
+const Minimatch = __webpack_require__(93).Minimatch
+
+class Walker extends EE {
+ constructor (opts) {
+ opts = opts || {}
+ super(opts)
+ this.path = opts.path || process.cwd()
+ this.basename = path.basename(this.path)
+ this.ignoreFiles = opts.ignoreFiles || [ '.ignore' ]
+ this.ignoreRules = {}
+ this.parent = opts.parent || null
+ this.includeEmpty = !!opts.includeEmpty
+ this.root = this.parent ? this.parent.root : this.path
+ this.follow = !!opts.follow
+ this.result = this.parent ? this.parent.result : new Set()
+ this.entries = null
+ this.sawError = false
+ }
+
+ sort (a, b) {
+ return a.localeCompare(b)
+ }
+
+ emit (ev, data) {
+ let ret = false
+ if (!(this.sawError && ev === 'error')) {
+ if (ev === 'error')
+ this.sawError = true
+ else if (ev === 'done' && !this.parent) {
+ data = Array.from(data)
+ .map(e => /^@/.test(e) ? `./${e}` : e).sort(this.sort)
+ this.result = data
+ }
+
+ if (ev === 'error' && this.parent)
+ ret = this.parent.emit('error', data)
+ else
+ ret = super.emit(ev, data)
}
- return obj;
-}
+ return ret
+ }
-function doGetThen(obj) {
- return obj.then;
-}
+ start () {
+ fs.readdir(this.path, (er, entries) =>
+ er ? this.emit('error', er) : this.onReaddir(entries))
+ return this
+ }
-function getThen(obj) {
- try {
- return doGetThen(obj);
- } catch (e) {
- errorObj.e = e;
- return errorObj;
+ isIgnoreFile (e) {
+ return e !== "." &&
+ e !== ".." &&
+ -1 !== this.ignoreFiles.indexOf(e)
+ }
+
+ onReaddir (entries) {
+ this.entries = entries
+ if (entries.length === 0) {
+ if (this.includeEmpty)
+ this.result.add(this.path.substr(this.root.length + 1))
+ this.emit('done', this.result)
+ } else {
+ const hasIg = this.entries.some(e =>
+ this.isIgnoreFile(e))
+
+ if (hasIg)
+ this.addIgnoreFiles()
+ else
+ this.filterEntries()
}
-}
+ }
-var hasProp = {}.hasOwnProperty;
-function isAnyBluebirdPromise(obj) {
- try {
- return hasProp.call(obj, "_promise0");
- } catch (e) {
- return false;
+ addIgnoreFiles () {
+ const newIg = this.entries
+ .filter(e => this.isIgnoreFile(e))
+
+ let igCount = newIg.length
+ const then = _ => {
+ if (--igCount === 0)
+ this.filterEntries()
}
-}
-function doThenable(x, then, context) {
- var promise = new Promise(INTERNAL);
- var ret = promise;
- if (context) context._pushContext();
- promise._captureStackTrace();
- if (context) context._popContext();
- var synchronous = true;
- var result = util.tryCatch(then).call(x, resolve, reject);
- synchronous = false;
+ newIg.forEach(e => this.addIgnoreFile(e, then))
+ }
- if (promise && result === errorObj) {
- promise._rejectCallback(result.e, true, true);
- promise = null;
+ addIgnoreFile (file, then) {
+ const ig = path.resolve(this.path, file)
+ fs.readFile(ig, 'utf8', (er, data) =>
+ er ? this.emit('error', er) : this.onReadIgnoreFile(file, data, then))
+ }
+
+ onReadIgnoreFile (file, data, then) {
+ const mmopt = {
+ matchBase: true,
+ dot: true,
+ flipNegate: true,
+ nocase: true
}
+ const rules = data.split(/\r?\n/)
+ .filter(line => !/^#|^$/.test(line.trim()))
+ .map(r => new Minimatch(r, mmopt))
- function resolve(value) {
- if (!promise) return;
- promise._resolveCallback(value);
- promise = null;
+ this.ignoreRules[file] = rules
+
+ then()
+ }
+
+ filterEntries () {
+ // at this point we either have ignore rules, or just inheriting
+ // this exclusion is at the point where we know the list of
+ // entries in the dir, but don't know what they are. since
+ // some of them *might* be directories, we have to run the
+ // match in dir-mode as well, so that we'll pick up partials
+ // of files that will be included later. Anything included
+ // at this point will be checked again later once we know
+ // what it is.
+ const filtered = this.entries.map(entry => {
+ // at this point, we don't know if it's a dir or not.
+ const passFile = this.filterEntry(entry)
+ const passDir = this.filterEntry(entry, true)
+ return (passFile || passDir) ? [entry, passFile, passDir] : false
+ }).filter(e => e)
+
+ // now we stat them all
+ // if it's a dir, and passes as a dir, then recurse
+ // if it's not a dir, but passes as a file, add to set
+ let entryCount = filtered.length
+ if (entryCount === 0) {
+ this.emit('done', this.result)
+ } else {
+ const then = _ => {
+ if (-- entryCount === 0)
+ this.emit('done', this.result)
+ }
+ filtered.forEach(filt => {
+ const entry = filt[0]
+ const file = filt[1]
+ const dir = filt[2]
+ this.stat(entry, file, dir, then)
+ })
}
+ }
- function reject(reason) {
- if (!promise) return;
- promise._rejectCallback(reason, synchronous, true);
- promise = null;
+ onstat (st, entry, file, dir, then) {
+ const abs = this.path + '/' + entry
+ if (!st.isDirectory()) {
+ if (file)
+ this.result.add(abs.substr(this.root.length + 1))
+ then()
+ } else {
+ // is a directory
+ if (dir)
+ this.walker(entry, then)
+ else
+ then()
}
- return ret;
-}
+ }
-return tryConvertToPromise;
-};
+ stat (entry, file, dir, then) {
+ const abs = this.path + '/' + entry
+ fs[this.follow ? 'stat' : 'lstat'](abs, (er, st) => {
+ if (er)
+ this.emit('error', er)
+ else
+ this.onstat(st, entry, file, dir, then)
+ })
+ }
+ walkerOpt (entry) {
+ return {
+ path: this.path + '/' + entry,
+ parent: this,
+ ignoreFiles: this.ignoreFiles,
+ follow: this.follow,
+ includeEmpty: this.includeEmpty
+ }
+ }
-/***/ }),
-/* 378 */,
-/* 379 */,
-/* 380 */,
-/* 381 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ walker (entry, then) {
+ new Walker(this.walkerOpt(entry)).on('done', then).start()
+ }
-"use strict";
+ filterEntry (entry, partial) {
+ let included = true
+ // this = /a/b/c
+ // entry = d
+ // parent /a/b sees c/d
+ if (this.parent && this.parent.filterEntry) {
+ var pt = this.basename + "/" + entry
+ included = this.parent.filterEntry(pt, partial)
+ }
-var errcode = __webpack_require__(120);
-var retry = __webpack_require__(58);
+ this.ignoreFiles.forEach(f => {
+ if (this.ignoreRules[f]) {
+ this.ignoreRules[f].forEach(rule => {
+ // negation means inclusion
+ // so if it's negated, and already included, no need to check
+ // likewise if it's neither negated nor included
+ if (rule.negate !== included) {
+ // first, match against /foo/bar
+ // then, against foo/bar
+ // then, in the case of partials, match with a /
+ const match = rule.match('/' + entry) ||
+ rule.match(entry) ||
+ (!!partial && (
+ rule.match('/' + entry + '/') ||
+ rule.match(entry + '/'))) ||
+ (!!partial && rule.negate && (
+ rule.match('/' + entry, true) ||
+ rule.match(entry, true)))
-var hasOwn = Object.prototype.hasOwnProperty;
+ if (match)
+ included = rule.negate
+ }
+ })
+ }
+ })
-function isRetryError(err) {
- return err && err.code === 'EPROMISERETRY' && hasOwn.call(err, 'retried');
+ return included
+ }
}
-function promiseRetry(fn, options) {
- var temp;
- var operation;
+class WalkerSync extends Walker {
+ constructor (opt) {
+ super(opt)
+ }
- if (typeof fn === 'object' && typeof options === 'function') {
- // Swap options and fn when using alternate signature (options, fn)
- temp = options;
- options = fn;
- fn = temp;
- }
+ start () {
+ this.onReaddir(fs.readdirSync(this.path))
+ return this
+ }
- operation = retry.operation(options);
+ addIgnoreFile (file, then) {
+ const ig = path.resolve(this.path, file)
+ this.onReadIgnoreFile(file, fs.readFileSync(ig, 'utf8'), then)
+ }
- return new Promise(function (resolve, reject) {
- operation.attempt(function (number) {
- Promise.resolve()
- .then(function () {
- return fn(function (err) {
- if (isRetryError(err)) {
- err = err.retried;
- }
+ stat (entry, file, dir, then) {
+ const abs = this.path + '/' + entry
+ const st = fs[this.follow ? 'statSync' : 'lstatSync'](abs)
+ this.onstat(st, entry, file, dir, then)
+ }
- throw errcode('Retrying', 'EPROMISERETRY', { retried: err });
- }, number);
- })
- .then(resolve, function (err) {
- if (isRetryError(err)) {
- err = err.retried;
+ walker (entry, then) {
+ new WalkerSync(this.walkerOpt(entry)).start()
+ then()
+ }
+}
- if (operation.retry(err || new Error())) {
- return;
- }
- }
+const walk = (options, callback) => {
+ const p = new Promise((resolve, reject) => {
+ new Walker(options).on('done', resolve).on('error', reject).start()
+ })
+ return callback ? p.then(res => callback(null, res), callback) : p
+}
- reject(err);
- });
- });
- });
+const walkSync = options => {
+ return new WalkerSync(options).start().result
}
-module.exports = promiseRetry;
+module.exports = walk
+walk.sync = walkSync
+walk.Walker = Walker
+walk.WalkerSync = WalkerSync
/***/ }),
-/* 382 */
+/* 419 */,
+/* 420 */
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
-var iconvLite = __webpack_require__(841);
-
-// Expose to the world
-module.exports.convert = convert;
-
-/**
- * Convert encoding of an UTF-8 string or a buffer
- *
- * @param {String|Buffer} str String to be converted
- * @param {String} to Encoding to be converted to
- * @param {String} [from='UTF-8'] Encoding to be converted from
- * @return {Buffer} Encoded string
- */
-function convert(str, to, from) {
- from = checkEncoding(from || 'UTF-8');
- to = checkEncoding(to || 'UTF-8');
- str = str || '';
-
- var result;
-
- if (from !== 'UTF-8' && typeof str === 'string') {
- str = Buffer.from(str, 'binary');
- }
+const figgyPudding = __webpack_require__(965)
+const logger = __webpack_require__(354)
- if (from === to) {
- if (typeof str === 'string') {
- result = Buffer.from(str);
- } else {
- result = str;
- }
- } else {
- try {
- result = convertIconvLite(str, to, from);
- } catch (E) {
- console.error(E);
- result = str;
- }
- }
+const AUTH_REGEX = /^(?:.*:)?(token|_authToken|username|_password|password|email|always-auth|_auth|otp)$/
+const SCOPE_REGISTRY_REGEX = /@.*:registry$/gi
+module.exports = figgyPudding({
+ annotate: {},
+ cache: {},
+ defaultTag: 'tag',
+ dirPacker: {},
+ dmode: {},
+ 'enjoy-by': 'enjoyBy',
+ enjoyBy: {},
+ before: 'enjoyBy',
+ fmode: {},
+ 'fetch-retries': { default: 2 },
+ 'fetch-retry-factor': { default: 10 },
+ 'fetch-retry-maxtimeout': { default: 60000 },
+ 'fetch-retry-mintimeout': { default: 10000 },
+ fullMetadata: 'full-metadata',
+ 'full-metadata': { default: false },
+ gid: {},
+ git: {},
+ includeDeprecated: { default: true },
+ 'include-deprecated': 'includeDeprecated',
+ integrity: {},
+ log: { default: logger },
+ memoize: {},
+ offline: {},
+ preferOffline: 'prefer-offline',
+ 'prefer-offline': {},
+ preferOnline: 'prefer-online',
+ 'prefer-online': {},
+ registry: { default: 'https://registry.npmjs.org/' },
+ resolved: {},
+ retry: {},
+ scope: {},
+ tag: { default: 'latest' },
+ uid: {},
+ umask: {},
+ where: {}
+}, {
+ other (key) {
+ return key.match(AUTH_REGEX) || key.match(SCOPE_REGISTRY_REGEX)
+ }
+})
- if (typeof result === 'string') {
- result = Buffer.from(result, 'utf-8');
- }
- return result;
-}
+/***/ }),
+/* 421 */
+/***/ (function(module) {
/**
- * Convert encoding of astring with iconv-lite
- *
- * @param {String|Buffer} str String to be converted
- * @param {String} to Encoding to be converted to
- * @param {String} [from='UTF-8'] Encoding to be converted from
- * @return {Buffer} Encoded string
+ * Helpers.
*/
-function convertIconvLite(str, to, from) {
- if (to === 'UTF-8') {
- return iconvLite.decode(str, from);
- } else if (from === 'UTF-8') {
- return iconvLite.encode(str, to);
- } else {
- return iconvLite.encode(iconvLite.decode(str, from), to);
- }
-}
+
+var s = 1000;
+var m = s * 60;
+var h = m * 60;
+var d = h * 24;
+var y = d * 365.25;
/**
- * Converts charset name if needed
+ * Parse or format the given `val`.
*
- * @param {String} name Character set
- * @return {String} Character set name
+ * Options:
+ *
+ * - `long` verbose formatting [false]
+ *
+ * @param {String|Number} val
+ * @param {Object} [options]
+ * @throws {Error} throw an error if val is not a non-empty string or a number
+ * @return {String|Number}
+ * @api public
*/
-function checkEncoding(name) {
- return (name || '')
- .toString()
- .trim()
- .replace(/^latin[\-_]?(\d+)$/i, 'ISO-8859-$1')
- .replace(/^win(?:dows)?[\-_]?(\d+)$/i, 'WINDOWS-$1')
- .replace(/^utf[\-_]?(\d+)$/i, 'UTF-$1')
- .replace(/^ks_c_5601\-1987$/i, 'CP949')
- .replace(/^us[\-_]?ascii$/i, 'ASCII')
- .toUpperCase();
-}
-
-
-/***/ }),
-/* 383 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
-"use strict";
+module.exports = function(val, options) {
+ options = options || {};
+ var type = typeof val;
+ if (type === 'string' && val.length > 0) {
+ return parse(val);
+ } else if (type === 'number' && isNaN(val) === false) {
+ return options.long ? fmtLong(val) : fmtShort(val);
+ }
+ throw new Error(
+ 'val is not a non-empty string or a valid number. val=' +
+ JSON.stringify(val)
+ );
+};
-Object.defineProperty(exports, "__esModule", { value: true });
-const assert = __webpack_require__(357);
-const path = __webpack_require__(622);
-const pathHelper = __webpack_require__(972);
-const IS_WINDOWS = process.platform === 'win32';
/**
- * Helper class for parsing paths into segments
+ * Parse the given `str` and return milliseconds.
+ *
+ * @param {String} str
+ * @return {Number}
+ * @api private
*/
-class Path {
- /**
- * Constructs a Path
- * @param itemPath Path or array of segments
- */
- constructor(itemPath) {
- this.segments = [];
- // String
- if (typeof itemPath === 'string') {
- assert(itemPath, `Parameter 'itemPath' must not be empty`);
- // Normalize slashes and trim unnecessary trailing slash
- itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
- // Not rooted
- if (!pathHelper.hasRoot(itemPath)) {
- this.segments = itemPath.split(path.sep);
- }
- // Rooted
- else {
- // Add all segments, while not at the root
- let remaining = itemPath;
- let dir = pathHelper.dirname(remaining);
- while (dir !== remaining) {
- // Add the segment
- const basename = path.basename(remaining);
- this.segments.unshift(basename);
- // Truncate the last segment
- remaining = dir;
- dir = pathHelper.dirname(remaining);
- }
- // Remainder is the root
- this.segments.unshift(remaining);
- }
- }
- // Array
- else {
- // Must not be empty
- assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
- // Each segment
- for (let i = 0; i < itemPath.length; i++) {
- let segment = itemPath[i];
- // Must not be empty
- assert(segment, `Parameter 'itemPath' must not contain any empty segments`);
- // Normalize slashes
- segment = pathHelper.normalizeSeparators(itemPath[i]);
- // Root segment
- if (i === 0 && pathHelper.hasRoot(segment)) {
- segment = pathHelper.safeTrimTrailingSeparator(segment);
- assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
- this.segments.push(segment);
- }
- // All other segments
- else {
- // Must not contain slash
- assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
- this.segments.push(segment);
- }
- }
- }
- }
- /**
- * Converts the path to it's string representation
- */
- toString() {
- // First segment
- let result = this.segments[0];
- // All others
- let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result));
- for (let i = 1; i < this.segments.length; i++) {
- if (skipSlash) {
- skipSlash = false;
- }
- else {
- result += path.sep;
- }
- result += this.segments[i];
- }
- return result;
- }
-}
-exports.Path = Path;
-//# sourceMappingURL=internal-path.js.map
-
-/***/ }),
-/* 384 */
-/***/ (function(module) {
-"use strict";
+function parse(str) {
+ str = String(str);
+ if (str.length > 100) {
+ return;
+ }
+ var match = /^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec(
+ str
+ );
+ if (!match) {
+ return;
+ }
+ var n = parseFloat(match[1]);
+ var type = (match[2] || 'ms').toLowerCase();
+ switch (type) {
+ case 'years':
+ case 'year':
+ case 'yrs':
+ case 'yr':
+ case 'y':
+ return n * y;
+ case 'days':
+ case 'day':
+ case 'd':
+ return n * d;
+ case 'hours':
+ case 'hour':
+ case 'hrs':
+ case 'hr':
+ case 'h':
+ return n * h;
+ case 'minutes':
+ case 'minute':
+ case 'mins':
+ case 'min':
+ case 'm':
+ return n * m;
+ case 'seconds':
+ case 'second':
+ case 'secs':
+ case 'sec':
+ case 's':
+ return n * s;
+ case 'milliseconds':
+ case 'millisecond':
+ case 'msecs':
+ case 'msec':
+ case 'ms':
+ return n;
+ default:
+ return undefined;
+ }
+}
+/**
+ * Short format for `ms`.
+ *
+ * @param {Number} ms
+ * @return {String}
+ * @api private
+ */
-module.exports.isObjectProto = isObjectProto
-function isObjectProto (obj) {
- return obj === Object.prototype
+function fmtShort(ms) {
+ if (ms >= d) {
+ return Math.round(ms / d) + 'd';
+ }
+ if (ms >= h) {
+ return Math.round(ms / h) + 'h';
+ }
+ if (ms >= m) {
+ return Math.round(ms / m) + 'm';
+ }
+ if (ms >= s) {
+ return Math.round(ms / s) + 's';
+ }
+ return ms + 'ms';
}
-const _null = {}
-const _undefined = {}
-const Bool = Boolean
-const Num = Number
-const Str = String
-const boolCache = {
- true: new Bool(true),
- false: new Bool(false)
+/**
+ * Long format for `ms`.
+ *
+ * @param {Number} ms
+ * @return {String}
+ * @api private
+ */
+
+function fmtLong(ms) {
+ return plural(ms, d, 'day') ||
+ plural(ms, h, 'hour') ||
+ plural(ms, m, 'minute') ||
+ plural(ms, s, 'second') ||
+ ms + ' ms';
}
-const numCache = {}
-const strCache = {}
-/*
- * Returns a useful dispatch object for value using a process similar to
- * the ToObject operation specified in http://es5.github.com/#x9.9
+/**
+ * Pluralization helper.
*/
-module.exports.dispatchableObject = dispatchableObject
-function dispatchableObject (value) {
- // To shut up jshint, which doesn't let me turn off this warning.
- const Obj = Object
- if (value === null) { return _null }
- if (value === undefined) { return _undefined }
- switch (typeof value) {
- case 'object': return value
- case 'boolean': return boolCache[value]
- case 'number': return numCache[value] || (numCache[value] = new Num(value))
- case 'string': return strCache[value] || (strCache[value] = new Str(value))
- default: return new Obj(value)
+
+function plural(ms, n, name) {
+ if (ms < n) {
+ return;
+ }
+ if (ms < n * 1.5) {
+ return Math.floor(ms / n) + ' ' + name;
}
+ return Math.ceil(ms / n) + ' ' + name + 's';
}
/***/ }),
-/* 385 */,
-/* 386 */,
-/* 387 */
+/* 422 */
/***/ (function(module) {
-module.exports = {"name":"node-gyp","description":"Node.js native addon build tool","license":"MIT","keywords":["native","addon","module","c","c++","bindings","gyp"],"version":"5.1.1","installVersion":9,"author":"Nathan Rajlich (http://tootallnate.net)","repository":{"type":"git","url":"git://github.com/nodejs/node-gyp.git"},"preferGlobal":true,"bin":"./bin/node-gyp.js","main":"./lib/node-gyp.js","dependencies":{"env-paths":"^2.2.0","glob":"^7.1.4","graceful-fs":"^4.2.2","mkdirp":"^0.5.1","nopt":"^4.0.1","npmlog":"^4.1.2","request":"^2.88.0","rimraf":"^2.6.3","semver":"^5.7.1","tar":"^4.4.12","which":"^1.3.1"},"engines":{"node":">= 6.0.0"},"devDependencies":{"bindings":"^1.5.0","nan":"^2.14.0","require-inject":"^1.4.4","standard":"^14.3.1","tap":"~12.7.0"},"scripts":{"lint":"standard */*.js test/**/*.js","test":"npm run lint && tap --timeout=120 test/test-*"}};
+/*! *****************************************************************************
+Copyright (c) Microsoft Corporation.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+***************************************************************************** */
+
+/* global global, define, System, Reflect, Promise */
+var __extends;
+var __assign;
+var __rest;
+var __decorate;
+var __param;
+var __metadata;
+var __awaiter;
+var __generator;
+var __exportStar;
+var __values;
+var __read;
+var __spread;
+var __spreadArrays;
+var __await;
+var __asyncGenerator;
+var __asyncDelegator;
+var __asyncValues;
+var __makeTemplateObject;
+var __importStar;
+var __importDefault;
+var __classPrivateFieldGet;
+var __classPrivateFieldSet;
+var __createBinding;
+(function (factory) {
+ var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {};
+ if (typeof define === "function" && define.amd) {
+ define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); });
+ }
+ else if ( true && typeof module.exports === "object") {
+ factory(createExporter(root, createExporter(module.exports)));
+ }
+ else {
+ factory(createExporter(root));
+ }
+ function createExporter(exports, previous) {
+ if (exports !== root) {
+ if (typeof Object.create === "function") {
+ Object.defineProperty(exports, "__esModule", { value: true });
+ }
+ else {
+ exports.__esModule = true;
+ }
+ }
+ return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };
+ }
+})
+(function (exporter) {
+ var extendStatics = Object.setPrototypeOf ||
+ ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
+ function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
+
+ __extends = function (d, b) {
+ extendStatics(d, b);
+ function __() { this.constructor = d; }
+ d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+ };
+
+ __assign = Object.assign || function (t) {
+ for (var s, i = 1, n = arguments.length; i < n; i++) {
+ s = arguments[i];
+ for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
+ }
+ return t;
+ };
+
+ __rest = function (s, e) {
+ var t = {};
+ for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
+ t[p] = s[p];
+ if (s != null && typeof Object.getOwnPropertySymbols === "function")
+ for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
+ if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
+ t[p[i]] = s[p[i]];
+ }
+ return t;
+ };
+
+ __decorate = function (decorators, target, key, desc) {
+ var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
+ if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
+ else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
+ return c > 3 && r && Object.defineProperty(target, key, r), r;
+ };
+
+ __param = function (paramIndex, decorator) {
+ return function (target, key) { decorator(target, key, paramIndex); }
+ };
+
+ __metadata = function (metadataKey, metadataValue) {
+ if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue);
+ };
+
+ __awaiter = function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+ };
+
+ __generator = function (thisArg, body) {
+ var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
+ return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
+ function verb(n) { return function (v) { return step([n, v]); }; }
+ function step(op) {
+ if (f) throw new TypeError("Generator is already executing.");
+ while (_) try {
+ if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
+ if (y = 0, t) op = [op[0] & 2, t.value];
+ switch (op[0]) {
+ case 0: case 1: t = op; break;
+ case 4: _.label++; return { value: op[1], done: false };
+ case 5: _.label++; y = op[1]; op = [0]; continue;
+ case 7: op = _.ops.pop(); _.trys.pop(); continue;
+ default:
+ if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
+ if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
+ if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
+ if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
+ if (t[2]) _.ops.pop();
+ _.trys.pop(); continue;
+ }
+ op = body.call(thisArg, _);
+ } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
+ if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
+ }
+ };
+
+ __createBinding = function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ };
+
+ __exportStar = function (m, exports) {
+ for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) exports[p] = m[p];
+ };
+
+ __values = function (o) {
+ var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
+ if (m) return m.call(o);
+ if (o && typeof o.length === "number") return {
+ next: function () {
+ if (o && i >= o.length) o = void 0;
+ return { value: o && o[i++], done: !o };
+ }
+ };
+ throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
+ };
+
+ __read = function (o, n) {
+ var m = typeof Symbol === "function" && o[Symbol.iterator];
+ if (!m) return o;
+ var i = m.call(o), r, ar = [], e;
+ try {
+ while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
+ }
+ catch (error) { e = { error: error }; }
+ finally {
+ try {
+ if (r && !r.done && (m = i["return"])) m.call(i);
+ }
+ finally { if (e) throw e.error; }
+ }
+ return ar;
+ };
+
+ __spread = function () {
+ for (var ar = [], i = 0; i < arguments.length; i++)
+ ar = ar.concat(__read(arguments[i]));
+ return ar;
+ };
+
+ __spreadArrays = function () {
+ for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;
+ for (var r = Array(s), k = 0, i = 0; i < il; i++)
+ for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)
+ r[k] = a[j];
+ return r;
+ };
+
+ __await = function (v) {
+ return this instanceof __await ? (this.v = v, this) : new __await(v);
+ };
+
+ __asyncGenerator = function (thisArg, _arguments, generator) {
+ if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+ var g = generator.apply(thisArg, _arguments || []), i, q = [];
+ return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
+ function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
+ function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
+ function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
+ function fulfill(value) { resume("next", value); }
+ function reject(value) { resume("throw", value); }
+ function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
+ };
+
+ __asyncDelegator = function (o) {
+ var i, p;
+ return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i;
+ function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; }
+ };
+
+ __asyncValues = function (o) {
+ if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+ var m = o[Symbol.asyncIterator], i;
+ return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+ function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+ function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+ };
+
+ __makeTemplateObject = function (cooked, raw) {
+ if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; }
+ return cooked;
+ };
+
+ __importStar = function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+ result["default"] = mod;
+ return result;
+ };
+
+ __importDefault = function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+
+ __classPrivateFieldGet = function (receiver, privateMap) {
+ if (!privateMap.has(receiver)) {
+ throw new TypeError("attempted to get private field on non-instance");
+ }
+ return privateMap.get(receiver);
+ };
+
+ __classPrivateFieldSet = function (receiver, privateMap, value) {
+ if (!privateMap.has(receiver)) {
+ throw new TypeError("attempted to set private field on non-instance");
+ }
+ privateMap.set(receiver, value);
+ return value;
+ };
+
+ exporter("__extends", __extends);
+ exporter("__assign", __assign);
+ exporter("__rest", __rest);
+ exporter("__decorate", __decorate);
+ exporter("__param", __param);
+ exporter("__metadata", __metadata);
+ exporter("__awaiter", __awaiter);
+ exporter("__generator", __generator);
+ exporter("__exportStar", __exportStar);
+ exporter("__createBinding", __createBinding);
+ exporter("__values", __values);
+ exporter("__read", __read);
+ exporter("__spread", __spread);
+ exporter("__spreadArrays", __spreadArrays);
+ exporter("__await", __await);
+ exporter("__asyncGenerator", __asyncGenerator);
+ exporter("__asyncDelegator", __asyncDelegator);
+ exporter("__asyncValues", __asyncValues);
+ exporter("__makeTemplateObject", __makeTemplateObject);
+ exporter("__importStar", __importStar);
+ exporter("__importDefault", __importDefault);
+ exporter("__classPrivateFieldGet", __classPrivateFieldGet);
+ exporter("__classPrivateFieldSet", __classPrivateFieldSet);
+});
+
/***/ }),
-/* 388 */
+/* 423 */
/***/ (function(module, __unusedexports, __webpack_require__) {
-/**
- * Detect Electron renderer process, which is node, but we should
- * treat as a browser.
- */
+// Generated by CoffeeScript 1.12.7
+(function() {
+ var NodeType, WriterState, XMLCData, XMLComment, XMLDTDAttList, XMLDTDElement, XMLDTDEntity, XMLDTDNotation, XMLDeclaration, XMLDocType, XMLDummy, XMLElement, XMLProcessingInstruction, XMLRaw, XMLText, XMLWriterBase, assign,
+ hasProp = {}.hasOwnProperty;
-if (typeof process === 'undefined' || process.type === 'renderer') {
- module.exports = __webpack_require__(592);
-} else {
- module.exports = __webpack_require__(161);
-}
+ assign = __webpack_require__(582).assign;
+ NodeType = __webpack_require__(683);
-/***/ }),
-/* 389 */,
-/* 390 */,
-/* 391 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
+ XMLDeclaration = __webpack_require__(738);
-var current = (process.versions && process.versions.node && process.versions.node.split('.')) || [];
+ XMLDocType = __webpack_require__(735);
-function specifierIncluded(specifier) {
- var parts = specifier.split(' ');
- var op = parts.length > 1 ? parts[0] : '=';
- var versionParts = (parts.length > 1 ? parts[1] : parts[0]).split('.');
+ XMLCData = __webpack_require__(657);
- for (var i = 0; i < 3; ++i) {
- var cur = Number(current[i] || 0);
- var ver = Number(versionParts[i] || 0);
- if (cur === ver) {
- continue; // eslint-disable-line no-restricted-syntax, no-continue
- }
- if (op === '<') {
- return cur < ver;
- } else if (op === '>=') {
- return cur >= ver;
- } else {
- return false;
- }
- }
- return op === '>=';
-}
+ XMLComment = __webpack_require__(919);
-function matchesRange(range) {
- var specifiers = range.split(/ ?&& ?/);
- if (specifiers.length === 0) { return false; }
- for (var i = 0; i < specifiers.length; ++i) {
- if (!specifierIncluded(specifiers[i])) { return false; }
- }
- return true;
-}
+ XMLElement = __webpack_require__(796);
-function versionIncluded(specifierValue) {
- if (typeof specifierValue === 'boolean') { return specifierValue; }
- if (specifierValue && typeof specifierValue === 'object') {
- for (var i = 0; i < specifierValue.length; ++i) {
- if (matchesRange(specifierValue[i])) { return true; }
- }
- return false;
- }
- return matchesRange(specifierValue);
-}
+ XMLRaw = __webpack_require__(660);
-var data = __webpack_require__(656);
+ XMLText = __webpack_require__(708);
-var core = {};
-for (var mod in data) { // eslint-disable-line no-restricted-syntax
- if (Object.prototype.hasOwnProperty.call(data, mod)) {
- core[mod] = versionIncluded(data[mod]);
- }
-}
-module.exports = core;
+ XMLProcessingInstruction = __webpack_require__(491);
+ XMLDummy = __webpack_require__(956);
-/***/ }),
-/* 392 */,
-/* 393 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {
+ XMLDTDAttList = __webpack_require__(890);
-"use strict";
-/*!
- * Copyright (c) 2015, Salesforce.com, Inc.
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are met:
- *
- * 1. Redistributions of source code must retain the above copyright notice,
- * this list of conditions and the following disclaimer.
- *
- * 2. Redistributions in binary form must reproduce the above copyright notice,
- * this list of conditions and the following disclaimer in the documentation
- * and/or other materials provided with the distribution.
- *
- * 3. Neither the name of Salesforce.com nor the names of its contributors may
- * be used to endorse or promote products derived from this software without
- * specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
- * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
- * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
- * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
- * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
- * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
- * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
- * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
- * POSSIBILITY OF SUCH DAMAGE.
- */
+ XMLDTDElement = __webpack_require__(463);
-const punycode = __webpack_require__(213);
-const urlParse = __webpack_require__(835).parse;
-const util = __webpack_require__(669);
-const pubsuffix = __webpack_require__(562);
-const Store = __webpack_require__(338).Store;
-const MemoryCookieStore = __webpack_require__(332).MemoryCookieStore;
-const pathMatch = __webpack_require__(348).pathMatch;
-const VERSION = __webpack_require__(460);
-const { fromCallback } = __webpack_require__(147);
+ XMLDTDEntity = __webpack_require__(661);
-// From RFC6265 S4.1.1
-// note that it excludes \x3B ";"
-const COOKIE_OCTETS = /^[\x21\x23-\x2B\x2D-\x3A\x3C-\x5B\x5D-\x7E]+$/;
+ XMLDTDNotation = __webpack_require__(19);
-const CONTROL_CHARS = /[\x00-\x1F]/;
+ WriterState = __webpack_require__(541);
-// From Chromium // '\r', '\n' and '\0' should be treated as a terminator in
-// the "relaxed" mode, see:
-// https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60
-const TERMINATORS = ["\n", "\r", "\0"];
+ module.exports = XMLWriterBase = (function() {
+ function XMLWriterBase(options) {
+ var key, ref, value;
+ options || (options = {});
+ this.options = options;
+ ref = options.writer || {};
+ for (key in ref) {
+ if (!hasProp.call(ref, key)) continue;
+ value = ref[key];
+ this["_" + key] = this[key];
+ this[key] = value;
+ }
+ }
-// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or ";"'
-// Note ';' is \x3B
-const PATH_VALUE = /[\x20-\x3A\x3C-\x7E]+/;
+ XMLWriterBase.prototype.filterOptions = function(options) {
+ var filteredOptions, ref, ref1, ref2, ref3, ref4, ref5, ref6;
+ options || (options = {});
+ options = assign({}, this.options, options);
+ filteredOptions = {
+ writer: this
+ };
+ filteredOptions.pretty = options.pretty || false;
+ filteredOptions.allowEmpty = options.allowEmpty || false;
+ filteredOptions.indent = (ref = options.indent) != null ? ref : ' ';
+ filteredOptions.newline = (ref1 = options.newline) != null ? ref1 : '\n';
+ filteredOptions.offset = (ref2 = options.offset) != null ? ref2 : 0;
+ filteredOptions.dontPrettyTextNodes = (ref3 = (ref4 = options.dontPrettyTextNodes) != null ? ref4 : options.dontprettytextnodes) != null ? ref3 : 0;
+ filteredOptions.spaceBeforeSlash = (ref5 = (ref6 = options.spaceBeforeSlash) != null ? ref6 : options.spacebeforeslash) != null ? ref5 : '';
+ if (filteredOptions.spaceBeforeSlash === true) {
+ filteredOptions.spaceBeforeSlash = ' ';
+ }
+ filteredOptions.suppressPrettyCount = 0;
+ filteredOptions.user = {};
+ filteredOptions.state = WriterState.None;
+ return filteredOptions;
+ };
-// date-time parsing constants (RFC6265 S5.1.1)
+ XMLWriterBase.prototype.indent = function(node, options, level) {
+ var indentLevel;
+ if (!options.pretty || options.suppressPrettyCount) {
+ return '';
+ } else if (options.pretty) {
+ indentLevel = (level || 0) + options.offset + 1;
+ if (indentLevel > 0) {
+ return new Array(indentLevel).join(options.indent);
+ }
+ }
+ return '';
+ };
-const DATE_DELIM = /[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]/;
+ XMLWriterBase.prototype.endline = function(node, options, level) {
+ if (!options.pretty || options.suppressPrettyCount) {
+ return '';
+ } else {
+ return options.newline;
+ }
+ };
-const MONTH_TO_NUM = {
- jan: 0,
- feb: 1,
- mar: 2,
- apr: 3,
- may: 4,
- jun: 5,
- jul: 6,
- aug: 7,
- sep: 8,
- oct: 9,
- nov: 10,
- dec: 11
-};
+ XMLWriterBase.prototype.attribute = function(att, options, level) {
+ var r;
+ this.openAttribute(att, options, level);
+ r = ' ' + att.name + '="' + att.value + '"';
+ this.closeAttribute(att, options, level);
+ return r;
+ };
-const MAX_TIME = 2147483647000; // 31-bit max
-const MIN_TIME = 0; // 31-bit min
-const SAME_SITE_CONTEXT_VAL_ERR =
- 'Invalid sameSiteContext option for getCookies(); expected one of "strict", "lax", or "none"';
+ XMLWriterBase.prototype.cdata = function(node, options, level) {
+ var r;
+ this.openNode(node, options, level);
+ options.state = WriterState.OpenTag;
+ r = this.indent(node, options, level) + '' + this.endline(node, options, level);
+ options.state = WriterState.None;
+ this.closeNode(node, options, level);
+ return r;
+ };
-function checkSameSiteContext(value) {
- const context = String(value).toLowerCase();
- if (context === "none" || context === "lax" || context === "strict") {
- return context;
- } else {
- return null;
- }
-}
+ XMLWriterBase.prototype.comment = function(node, options, level) {
+ var r;
+ this.openNode(node, options, level);
+ options.state = WriterState.OpenTag;
+ r = this.indent(node, options, level) + '' + this.endline(node, options, level);
+ options.state = WriterState.None;
+ this.closeNode(node, options, level);
+ return r;
+ };
-const PrefixSecurityEnum = Object.freeze({
- SILENT: "silent",
- STRICT: "strict",
- DISABLED: "unsafe-disabled"
-});
+ XMLWriterBase.prototype.declaration = function(node, options, level) {
+ var r;
+ this.openNode(node, options, level);
+ options.state = WriterState.OpenTag;
+ r = this.indent(node, options, level) + '';
+ r += this.endline(node, options, level);
+ options.state = WriterState.None;
+ this.closeNode(node, options, level);
+ return r;
+ };
-// Dumped from ip-regex@4.0.0, with the following changes:
-// * all capturing groups converted to non-capturing -- "(?:)"
-// * support for IPv6 Scoped Literal ("%eth1") removed
-// * lowercase hexadecimal only
-var IP_REGEX_LOWERCASE =/(?:^(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}$)|(?:^(?:(?:[a-f\d]{1,4}:){7}(?:[a-f\d]{1,4}|:)|(?:[a-f\d]{1,4}:){6}(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|:[a-f\d]{1,4}|:)|(?:[a-f\d]{1,4}:){5}(?::(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,2}|:)|(?:[a-f\d]{1,4}:){4}(?:(?::[a-f\d]{1,4}){0,1}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,3}|:)|(?:[a-f\d]{1,4}:){3}(?:(?::[a-f\d]{1,4}){0,2}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,4}|:)|(?:[a-f\d]{1,4}:){2}(?:(?::[a-f\d]{1,4}){0,3}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,5}|:)|(?:[a-f\d]{1,4}:){1}(?:(?::[a-f\d]{1,4}){0,4}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,6}|:)|(?::(?:(?::[a-f\d]{1,4}){0,5}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,7}|:)))$)/;
+ XMLWriterBase.prototype.docType = function(node, options, level) {
+ var child, i, len, r, ref;
+ level || (level = 0);
+ this.openNode(node, options, level);
+ options.state = WriterState.OpenTag;
+ r = this.indent(node, options, level);
+ r += ' 0) {
+ r += ' [';
+ r += this.endline(node, options, level);
+ options.state = WriterState.InsideTag;
+ ref = node.children;
+ for (i = 0, len = ref.length; i < len; i++) {
+ child = ref[i];
+ r += this.writeChildNode(child, options, level + 1);
+ }
+ options.state = WriterState.CloseTag;
+ r += ']';
+ }
+ options.state = WriterState.CloseTag;
+ r += options.spaceBeforeSlash + '>';
+ r += this.endline(node, options, level);
+ options.state = WriterState.None;
+ this.closeNode(node, options, level);
+ return r;
+ };
-/*
- * Parses a Natural number (i.e., non-negative integer) with either the
- *